From 6f13f1230168bba424d3707acf8ca07fc0593b37 Mon Sep 17 00:00:00 2001
From: lcnr
Date: Mon, 26 Sep 2022 13:00:29 +0200
Subject: [PATCH 1/4] rustc_typeck to rustc_hir_analysis

---
 crates/hir-ty/src/autoderef.rs         | 2 +-
 crates/hir-ty/src/infer.rs             | 2 +-
 crates/hir-ty/src/infer/coerce.rs      | 2 +-
 crates/hir-ty/src/method_resolution.rs | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs
index 344036dd81..e106c4c2cf 100644
--- a/crates/hir-ty/src/autoderef.rs
+++ b/crates/hir-ty/src/autoderef.rs
@@ -1,7 +1,7 @@
 //! In certain situations, rust automatically inserts derefs as necessary: for
 //! example, field accesses `foo.bar` still work when `foo` is actually a
 //! reference to a type with the field `bar`. This is an approximation of the
-//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs).
+//! logic in rustc (which lives in rustc_hir_analysis/check/autoderef.rs).

 use std::sync::Arc;

diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index e37763e8ea..9dbeba4f9f 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -2,7 +2,7 @@
 //! the type of each expression and pattern.
 //!
 //! For type inference, compare the implementations in rustc (the various
-//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and
+//! check_* methods in rustc_hir_analysis/check/mod.rs are a good entry point) and
 //! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for
 //! inference here is the `infer` function, which infers the types of all
 //! expressions in a given function.
diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs
index f54440bf5b..8df25c83c6 100644
--- a/crates/hir-ty/src/infer/coerce.rs
+++ b/crates/hir-ty/src/infer/coerce.rs
@@ -3,7 +3,7 @@
 //! like going from `&Vec` to `&[T]`.
 //!
 //! See and
-//! `librustc_typeck/check/coercion.rs`.
+//! `rustc_hir_analysis/check/coercion.rs`.

 use std::{iter, sync::Arc};

diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 41fcef73d9..cc21990d55 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -1,7 +1,7 @@
 //! This module is concerned with finding methods that a given type provides.
 //! For details about how this works in rustc, see the method lookup page in the
 //! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
-//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
+//! and the corresponding code mostly in rustc_hir_analysis/check/method/probe.rs.
 use std::{iter, ops::ControlFlow, sync::Arc};

 use arrayvec::ArrayVec;

From ed532e5a3426eb434b851cba14c591ad15c58999 Mon Sep 17 00:00:00 2001
From: Petr Portnov
Date: Sun, 2 Oct 2022 21:40:39 +0300
Subject: [PATCH 2/4] Fix duplicate usage of `a` article.

This fixes a typo first appearing in #94624 in which the test-macro
diagnostic uses the "a" article twice.

Since I searched sources for " a a " sequences, I also fixed the same issue
in a few source files where I found it.
Signed-off-by: Petr Portnov --- crates/ide-db/src/imports/merge_imports.rs | 2 +- crates/rust-analyzer/src/bin/logger.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/ide-db/src/imports/merge_imports.rs b/crates/ide-db/src/imports/merge_imports.rs index 7fb4b90e6d..371d642c15 100644 --- a/crates/ide-db/src/imports/merge_imports.rs +++ b/crates/ide-db/src/imports/merge_imports.rs @@ -225,7 +225,7 @@ fn path_cmp_short(a: &ast::Path, b: &ast::Path) -> Ordering { } /// Compares two paths, if one ends earlier than the other the has_tl parameters decide which is -/// greater as a a path that has a tree list should be greater, while one that just ends without +/// greater as a path that has a tree list should be greater, while one that just ends without /// a tree list should be considered less. pub(super) fn use_tree_path_cmp( a: &ast::Path, diff --git a/crates/rust-analyzer/src/bin/logger.rs b/crates/rust-analyzer/src/bin/logger.rs index 298814af5a..ac10721d95 100644 --- a/crates/rust-analyzer/src/bin/logger.rs +++ b/crates/rust-analyzer/src/bin/logger.rs @@ -132,7 +132,7 @@ where let ext = span.extensions(); - // `FormattedFields` is a a formatted representation of the span's + // `FormattedFields` is a formatted representation of the span's // fields, which is stored in its extensions by the `fmt` layer's // `new_span` method. The fields will have been formatted // by the same field formatter that's provided to the event From 3a57388d136518c063ffdad09e27394cfe66ecb3 Mon Sep 17 00:00:00 2001 From: Andres Suarez Date: Sat, 8 Oct 2022 23:25:40 -0400 Subject: [PATCH 3/4] update to syn-1.0.102 --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 216cf51447..6173f9a78c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1190,9 +1190,9 @@ version = "0.0.0" [[package]] name = "proc-macro2" -version = "1.0.43" +version = "1.0.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab" +checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b" dependencies = [ "unicode-ident", ] @@ -1593,9 +1593,9 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.99" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13" +checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1" dependencies = [ "proc-macro2", "quote", From 4f55ebbd4fb2aaf6ac97e484c07504e5124422e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 11 Oct 2022 10:37:35 +0300 Subject: [PATCH 4/4] :arrow_up: rust-analyzer --- Cargo.lock | 22 +- crates/base-db/src/fixture.rs | 8 +- crates/base-db/src/input.rs | 22 +- crates/flycheck/src/lib.rs | 13 +- crates/hir-def/src/adt.rs | 244 +++++++++++++--- crates/hir-def/src/body.rs | 8 +- crates/hir-def/src/body/lower.rs | 26 +- crates/hir-def/src/body/pretty.rs | 19 +- crates/hir-def/src/child_by_source.rs | 4 + crates/hir-def/src/data.rs | 8 +- crates/hir-def/src/db.rs | 15 +- crates/hir-def/src/expr.rs | 13 + crates/hir-def/src/find_path.rs | 2 +- crates/hir-def/src/item_tree.rs | 8 + crates/hir-def/src/item_tree/lower.rs | 9 +- crates/hir-def/src/item_tree/pretty.rs | 6 +- crates/hir-def/src/lib.rs | 10 + crates/hir-def/src/nameres/collector.rs | 2 +- crates/hir-def/src/nameres/diagnostics.rs | 6 +- crates/hir-def/src/resolver.rs | 1 + 
crates/hir-expand/src/ast_id_map.rs | 7 +- crates/hir-expand/src/lib.rs | 25 ++ crates/hir-expand/src/name.rs | 1 + crates/hir-ty/src/autoderef.rs | 5 +- crates/hir-ty/src/builder.rs | 217 ++++++++------ crates/hir-ty/src/chalk_db.rs | 71 ++++- crates/hir-ty/src/chalk_ext.rs | 4 +- crates/hir-ty/src/consteval.rs | 108 +++++-- crates/hir-ty/src/consteval/tests.rs | 43 +++ crates/hir-ty/src/db.rs | 19 +- crates/hir-ty/src/diagnostics/unsafe_check.rs | 4 +- crates/hir-ty/src/display.rs | 47 +++- crates/hir-ty/src/infer.rs | 37 ++- crates/hir-ty/src/infer/closure.rs | 6 + crates/hir-ty/src/infer/expr.rs | 114 +++++--- crates/hir-ty/src/infer/path.rs | 24 +- crates/hir-ty/src/infer/unify.rs | 7 +- crates/hir-ty/src/lower.rs | 235 ++++++++++------ crates/hir-ty/src/mapping.rs | 12 + crates/hir-ty/src/method_resolution.rs | 34 +-- crates/hir-ty/src/tests.rs | 22 +- crates/hir-ty/src/tests/coercion.rs | 18 ++ crates/hir-ty/src/tests/regression.rs | 54 ++-- crates/hir-ty/src/tests/simple.rs | 159 +++++++++-- crates/hir-ty/src/tests/traits.rs | 12 +- crates/hir-ty/src/utils.rs | 101 +++---- crates/hir/src/diagnostics.rs | 2 +- crates/hir/src/display.rs | 3 + crates/hir/src/from_id.rs | 10 +- crates/hir/src/lib.rs | 108 ++++++- crates/hir/src/semantics.rs | 15 + crates/hir/src/semantics/source_to_def.rs | 8 +- crates/hir/src/source_analyzer.rs | 46 ++- crates/hir/src/symbols.rs | 4 + .../src/handlers/move_format_string_arg.rs | 42 ++- .../ide-assists/src/handlers/unwrap_tuple.rs | 159 +++++++++++ crates/ide-assists/src/lib.rs | 2 + crates/ide-assists/src/tests.rs | 12 +- crates/ide-assists/src/tests/generated.rs | 19 ++ .../src/completions/item_list/trait_impl.rs | 51 ++-- crates/ide-db/src/search.rs | 1 + .../src/handlers/inactive_code.rs | 31 ++ crates/ide/src/annotations.rs | 264 +++++++++++------- .../src/{ => annotations}/fn_references.rs | 37 ++- crates/ide/src/doc_links.rs | 9 +- crates/ide/src/goto_definition.rs | 51 +++- crates/ide/src/highlight_related.rs | 16 ++ crates/ide/src/hover/render.rs | 11 +- crates/ide/src/hover/tests.rs | 136 +++++++++ crates/ide/src/inlay_hints.rs | 15 +- crates/ide/src/lib.rs | 8 +- crates/ide/src/moniker.rs | 10 +- .../ide/src/syntax_highlighting/highlight.rs | 11 +- .../test_data/highlight_assoc_functions.html | 6 +- .../test_data/highlight_doctest.html | 2 +- .../test_data/highlight_general.html | 12 +- .../test_data/highlight_injection.html | 2 +- .../test_data/highlight_lifetimes.html | 4 +- .../test_data/highlight_strings.html | 8 +- .../test_data/highlight_unsafe.html | 8 +- .../src/abis/abi_sysroot/mod.rs | 2 + crates/proc-macro-srv/src/abis/mod.rs | 11 +- crates/proc-macro-srv/src/lib.rs | 9 +- crates/proc-macro-srv/src/tests/mod.rs | 11 +- crates/proc-macro-srv/src/tests/utils.rs | 12 +- crates/project-model/src/build_scripts.rs | 22 +- crates/project-model/src/cargo_workspace.rs | 134 ++++----- crates/project-model/src/lib.rs | 4 +- crates/project-model/src/project_json.rs | 3 + crates/project-model/src/rustc_cfg.rs | 13 +- crates/project-model/src/sysroot.rs | 65 +++-- crates/project-model/src/tests.rs | 54 +++- crates/project-model/src/workspace.rs | 132 +++++---- crates/rust-analyzer/Cargo.toml | 5 +- crates/rust-analyzer/src/bin/main.rs | 11 +- crates/rust-analyzer/src/cargo_target_spec.rs | 82 +++--- .../rust-analyzer/src/cli/analysis_stats.rs | 9 +- crates/rust-analyzer/src/cli/flags.rs | 31 +- crates/rust-analyzer/src/cli/load_cargo.rs | 30 +- crates/rust-analyzer/src/cli/lsif.rs | 2 +- crates/rust-analyzer/src/cli/scip.rs | 3 +- 
crates/rust-analyzer/src/config.rs | 65 +++-- crates/rust-analyzer/src/from_proto.rs | 10 +- crates/rust-analyzer/src/global_state.rs | 43 ++- crates/rust-analyzer/src/main_loop.rs | 4 +- crates/rust-analyzer/src/reload.rs | 65 +++-- crates/rust-analyzer/src/to_proto.rs | 12 +- crates/rust-analyzer/tests/slow-tests/main.rs | 12 +- .../rust-analyzer/tests/slow-tests/support.rs | 2 +- crates/sourcegen/Cargo.toml | 3 - crates/syntax/src/ast/node_ext.rs | 30 ++ crates/test-utils/src/minicore.rs | 48 ++++ docs/dev/README.md | 2 +- docs/dev/guide.md | 4 +- docs/user/generated_config.adoc | 10 +- editors/code/package.json | 13 +- lib/lsp-server/Cargo.toml | 10 +- lib/lsp-server/src/msg.rs | 10 +- lib/lsp-server/src/req_queue.rs | 7 + xtask/Cargo.toml | 2 +- xtask/src/flags.rs | 19 +- xtask/src/main.rs | 7 +- 122 files changed, 2885 insertions(+), 1093 deletions(-) create mode 100644 crates/ide-assists/src/handlers/unwrap_tuple.rs rename crates/ide/src/{ => annotations}/fn_references.rs (61%) diff --git a/Cargo.lock b/Cargo.lock index 6173f9a78c..744330b142 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -863,7 +863,7 @@ dependencies = [ [[package]] name = "lsp-server" -version = "0.6.0" +version = "0.7.0" dependencies = [ "crossbeam-channel", "log", @@ -1502,18 +1502,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.143" +version = "1.0.144" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53e8e5d5b70924f74ff5c6d64d9a5acd91422117c60f48c4e07855238a254553" +checksum = "0f747710de3dcd43b88c9168773254e809d8ddbdf9653b84e2554ab219f17860" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.143" +version = "1.0.144" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3d8e8de557aee63c26b85b947f5e59b690d0454c753f3adeb5cd7835ab88391" +checksum = "94ed3a816fb1d101812f83e789f888322c34e291f894f19590dc310963e87a00" dependencies = [ "proc-macro2", "quote", @@ -1522,9 +1522,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.83" +version = "1.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38dd04e3c8279e75b31ef29dbdceebfe5ad89f4d0937213c53f7d49d01b3d5a7" +checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44" dependencies = [ "indexmap", "itoa", @@ -2082,18 +2082,18 @@ checksum = "06069a848f95fceae3e5e03c0ddc8cb78452b56654ee0c8e68f938cf790fb9e3" [[package]] name = "xflags" -version = "0.2.4" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f14fe1ed41a5a2b5ef3f565586c4a8a559ee55d3953faab360a771135bdee00" +checksum = "cbf19f5031a1a812e96fede16f8161218883079946cea87619d3613db1efd268" dependencies = [ "xflags-macros", ] [[package]] name = "xflags-macros" -version = "0.2.4" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45d11d5fc2a97287eded8b170ca80533b3c42646dd7fa386a5eb045817921022" +checksum = "2afbd7f2039bb6cad2dd45f0c5dff49c0d4e26118398768b7a605524d4251809" [[package]] name = "xshell" diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 8e6e6a11ab..5b7828a269 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -196,7 +196,7 @@ impl ChangeFixture { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); } else { for (from, to, prelude) in crate_deps { @@ -270,7 +270,7 @@ impl ChangeFixture { Env::default(), 
Ok(proc_macro), true, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); for krate in all_crates { @@ -398,7 +398,7 @@ fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option) { let (version, origin) = match b.split_once(':') { Some(("CratesIo", data)) => match data.split_once(',') { Some((version, url)) => { - (version, CrateOrigin::CratesIo { repo: Some(url.to_owned()) }) + (version, CrateOrigin::CratesIo { repo: Some(url.to_owned()), name: None }) } _ => panic!("Bad crates.io parameter: {}", data), }, @@ -409,7 +409,7 @@ fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option) { let crate_origin = match &*crate_str { "std" => CrateOrigin::Lang(LangCrateOrigin::Std), "core" => CrateOrigin::Lang(LangCrateOrigin::Core), - _ => CrateOrigin::CratesIo { repo: None }, + _ => CrateOrigin::CratesIo { repo: None, name: None }, }; (crate_str, crate_origin, None) } diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index b388e47dee..e7f0c4ec29 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -136,7 +136,7 @@ impl ops::Deref for CrateName { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum CrateOrigin { /// Crates that are from crates.io official registry, - CratesIo { repo: Option }, + CratesIo { repo: Option, name: Option }, /// Crates that are provided by the language, like std, core, proc-macro, ... Lang(LangCrateOrigin), } @@ -648,7 +648,7 @@ mod tests { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -660,7 +660,7 @@ mod tests { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); let crate3 = graph.add_crate_root( FileId(3u32), @@ -672,7 +672,7 @@ mod tests { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -698,7 +698,7 @@ mod tests { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -710,7 +710,7 @@ mod tests { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -733,7 +733,7 @@ mod tests { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -745,7 +745,7 @@ mod tests { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); let crate3 = graph.add_crate_root( FileId(3u32), @@ -757,7 +757,7 @@ mod tests { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -780,7 +780,7 @@ mod tests { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -792,7 +792,7 @@ mod tests { 
Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); assert!(graph .add_dep( diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index fdc03f4053..e8c63d410a 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -169,13 +169,17 @@ impl FlycheckActor { } fn next_event(&self, inbox: &Receiver) -> Option { let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver); + if let Ok(msg) = inbox.try_recv() { + // give restarts a preference so check outputs don't block a restart or stop + return Some(Event::Restart(msg)); + } select! { recv(inbox) -> msg => msg.ok().map(Event::Restart), recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())), } } fn run(mut self, inbox: Receiver) { - while let Some(event) = self.next_event(&inbox) { + 'event: while let Some(event) = self.next_event(&inbox) { match event { Event::Restart(Restart::No) => { self.cancel_check_process(); @@ -183,7 +187,12 @@ impl FlycheckActor { Event::Restart(Restart::Yes) => { // Cancel the previously spawned process self.cancel_check_process(); - while let Ok(_) = inbox.recv_timeout(Duration::from_millis(50)) {} + while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) { + // restart chained with a stop, so just cancel + if let Restart::No = restart { + continue 'event; + } + } let command = self.check_command(); tracing::debug!(?command, "will restart flycheck"); diff --git a/crates/hir-def/src/adt.rs b/crates/hir-def/src/adt.rs index 277135d6dc..938db032fb 100644 --- a/crates/hir-def/src/adt.rs +++ b/crates/hir-def/src/adt.rs @@ -1,12 +1,12 @@ //! Defines hir-level representation of structs, enums and unions -use std::sync::Arc; +use std::{num::NonZeroU32, sync::Arc}; use base_db::CrateId; use either::Either; use hir_expand::{ name::{AsName, Name}, - InFile, + HirFileId, InFile, }; use la_arena::{Arena, ArenaMap}; use syntax::ast::{self, HasName, HasVisibility}; @@ -14,15 +14,18 @@ use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}; use crate::{ body::{CfgExpander, LowerCtx}, + builtin_type::{BuiltinInt, BuiltinUint}, db::DefDatabase, intern::Interned, - item_tree::{AttrOwner, Field, Fields, ItemTree, ModItem, RawVisibilityId}, + item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId}, + nameres::diagnostics::DefDiagnostic, src::HasChildSource, src::HasSource, trace::Trace, type_ref::TypeRef, visibility::RawVisibility, - EnumId, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId, VariantId, + EnumId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, UnionId, + VariantId, }; use cfg::CfgOptions; @@ -31,7 +34,7 @@ use cfg::CfgOptions; pub struct StructData { pub name: Name, pub variant_data: Arc, - pub repr: Option, + pub repr: Option, pub visibility: RawVisibility, } @@ -39,6 +42,7 @@ pub struct StructData { pub struct EnumData { pub name: Name, pub variants: Arena, + pub repr: Option, pub visibility: RawVisibility, } @@ -63,10 +67,19 @@ pub struct FieldData { pub visibility: RawVisibility, } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Copy, Debug, Clone, PartialEq, Eq)] pub enum ReprKind { - Packed, - Other, + C, + BuiltinInt { builtin: Either, is_c: bool }, + Transparent, + Default, +} + +#[derive(Copy, Debug, Clone, PartialEq, Eq)] +pub struct ReprData { + pub kind: ReprKind, + pub packed: bool, + pub align: Option, } fn repr_from_value( @@ -74,25 +87,71 @@ fn 
repr_from_value( krate: CrateId, item_tree: &ItemTree, of: AttrOwner, -) -> Option { +) -> Option { item_tree.attrs(db, krate, of).by_key("repr").tt_values().find_map(parse_repr_tt) } -fn parse_repr_tt(tt: &Subtree) -> Option { +fn parse_repr_tt(tt: &Subtree) -> Option { match tt.delimiter { Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {} _ => return None, } - let mut it = tt.token_trees.iter(); - match it.next()? { - TokenTree::Leaf(Leaf::Ident(ident)) if ident.text == "packed" => Some(ReprKind::Packed), - _ => Some(ReprKind::Other), + let mut data = ReprData { kind: ReprKind::Default, packed: false, align: None }; + + let mut tts = tt.token_trees.iter().peekable(); + while let Some(tt) = tts.next() { + if let TokenTree::Leaf(Leaf::Ident(ident)) = tt { + match &*ident.text { + "packed" => { + data.packed = true; + if let Some(TokenTree::Subtree(_)) = tts.peek() { + tts.next(); + } + } + "align" => { + if let Some(TokenTree::Subtree(tt)) = tts.peek() { + tts.next(); + if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() { + if let Ok(align) = lit.text.parse() { + data.align = Some(align); + } + } + } + } + "C" => { + if let ReprKind::BuiltinInt { is_c, .. } = &mut data.kind { + *is_c = true; + } else { + data.kind = ReprKind::C; + } + } + "transparent" => data.kind = ReprKind::Transparent, + repr => { + let is_c = matches!(data.kind, ReprKind::C); + if let Some(builtin) = BuiltinInt::from_suffix(repr) + .map(Either::Left) + .or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right)) + { + data.kind = ReprKind::BuiltinInt { builtin, is_c }; + } + } + } + } } + + Some(data) } impl StructData { pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc { + db.struct_data_with_diagnostics(id).0 + } + + pub(crate) fn struct_data_with_diagnostics_query( + db: &dyn DefDatabase, + id: StructId, + ) -> (Arc, Arc<[DefDiagnostic]>) { let loc = id.lookup(db); let krate = loc.container.krate; let item_tree = loc.id.item_tree(db); @@ -100,15 +159,35 @@ impl StructData { let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone(); let strukt = &item_tree[loc.id.value]; - let variant_data = lower_fields(db, krate, &item_tree, &cfg_options, &strukt.fields, None); - Arc::new(StructData { - name: strukt.name.clone(), - variant_data: Arc::new(variant_data), - repr, - visibility: item_tree[strukt.visibility].clone(), - }) + let (variant_data, diagnostics) = lower_fields( + db, + krate, + loc.id.file_id(), + loc.container.local_id, + &item_tree, + &cfg_options, + &strukt.fields, + None, + ); + ( + Arc::new(StructData { + name: strukt.name.clone(), + variant_data: Arc::new(variant_data), + repr, + visibility: item_tree[strukt.visibility].clone(), + }), + diagnostics.into(), + ) } + pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc { + db.union_data_with_diagnostics(id).0 + } + + pub(crate) fn union_data_with_diagnostics_query( + db: &dyn DefDatabase, + id: UnionId, + ) -> (Arc, Arc<[DefDiagnostic]>) { let loc = id.lookup(db); let krate = loc.container.krate; let item_tree = loc.id.item_tree(db); @@ -116,56 +195,98 @@ impl StructData { let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone(); let union = &item_tree[loc.id.value]; - let variant_data = lower_fields(db, krate, &item_tree, &cfg_options, &union.fields, None); - - Arc::new(StructData { - name: union.name.clone(), - variant_data: Arc::new(variant_data), - repr, - visibility: item_tree[union.visibility].clone(), - }) + let (variant_data, 
diagnostics) = lower_fields( + db, + krate, + loc.id.file_id(), + loc.container.local_id, + &item_tree, + &cfg_options, + &union.fields, + None, + ); + ( + Arc::new(StructData { + name: union.name.clone(), + variant_data: Arc::new(variant_data), + repr, + visibility: item_tree[union.visibility].clone(), + }), + diagnostics.into(), + ) } } impl EnumData { pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc { + db.enum_data_with_diagnostics(e).0 + } + + pub(crate) fn enum_data_with_diagnostics_query( + db: &dyn DefDatabase, + e: EnumId, + ) -> (Arc, Arc<[DefDiagnostic]>) { let loc = e.lookup(db); let krate = loc.container.krate; let item_tree = loc.id.item_tree(db); let cfg_options = db.crate_graph()[krate].cfg_options.clone(); + let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let enum_ = &item_tree[loc.id.value]; let mut variants = Arena::new(); + let mut diagnostics = Vec::new(); for tree_id in enum_.variants.clone() { - if item_tree.attrs(db, krate, tree_id.into()).is_cfg_enabled(&cfg_options) { - let var = &item_tree[tree_id]; - let var_data = lower_fields( + let attrs = item_tree.attrs(db, krate, tree_id.into()); + let var = &item_tree[tree_id]; + if attrs.is_cfg_enabled(&cfg_options) { + let (var_data, field_diagnostics) = lower_fields( db, krate, + loc.id.file_id(), + loc.container.local_id, &item_tree, &cfg_options, &var.fields, Some(enum_.visibility), ); + diagnostics.extend(field_diagnostics); variants.alloc(EnumVariantData { name: var.name.clone(), variant_data: Arc::new(var_data), }); + } else { + diagnostics.push(DefDiagnostic::unconfigured_code( + loc.container.local_id, + InFile::new(loc.id.file_id(), var.ast_id.upcast()), + attrs.cfg().unwrap(), + cfg_options.clone(), + )) } } - Arc::new(EnumData { - name: enum_.name.clone(), - variants, - visibility: item_tree[enum_.visibility].clone(), - }) + ( + Arc::new(EnumData { + name: enum_.name.clone(), + variants, + repr, + visibility: item_tree[enum_.visibility].clone(), + }), + diagnostics.into(), + ) } pub fn variant(&self, name: &Name) -> Option { let (id, _) = self.variants.iter().find(|(_id, data)| &data.name == name)?; Some(id) } + + pub fn variant_body_type(&self) -> Either { + match self.repr { + Some(ReprData { kind: ReprKind::BuiltinInt { builtin, .. }, .. 
}) => builtin, + _ => Either::Left(BuiltinInt::Isize), + } + } } impl HasChildSource for EnumId { @@ -324,31 +445,64 @@ fn lower_struct( fn lower_fields( db: &dyn DefDatabase, krate: CrateId, + current_file_id: HirFileId, + container: LocalModuleId, item_tree: &ItemTree, cfg_options: &CfgOptions, fields: &Fields, override_visibility: Option, -) -> VariantData { +) -> (VariantData, Vec) { + let mut diagnostics = Vec::new(); match fields { Fields::Record(flds) => { let mut arena = Arena::new(); for field_id in flds.clone() { - if item_tree.attrs(db, krate, field_id.into()).is_cfg_enabled(cfg_options) { - arena.alloc(lower_field(item_tree, &item_tree[field_id], override_visibility)); + let attrs = item_tree.attrs(db, krate, field_id.into()); + let field = &item_tree[field_id]; + if attrs.is_cfg_enabled(cfg_options) { + arena.alloc(lower_field(item_tree, field, override_visibility)); + } else { + diagnostics.push(DefDiagnostic::unconfigured_code( + container, + InFile::new( + current_file_id, + match field.ast_id { + FieldAstId::Record(it) => it.upcast(), + FieldAstId::Tuple(it) => it.upcast(), + }, + ), + attrs.cfg().unwrap(), + cfg_options.clone(), + )) } } - VariantData::Record(arena) + (VariantData::Record(arena), diagnostics) } Fields::Tuple(flds) => { let mut arena = Arena::new(); for field_id in flds.clone() { - if item_tree.attrs(db, krate, field_id.into()).is_cfg_enabled(cfg_options) { - arena.alloc(lower_field(item_tree, &item_tree[field_id], override_visibility)); + let attrs = item_tree.attrs(db, krate, field_id.into()); + let field = &item_tree[field_id]; + if attrs.is_cfg_enabled(cfg_options) { + arena.alloc(lower_field(item_tree, field, override_visibility)); + } else { + diagnostics.push(DefDiagnostic::unconfigured_code( + container, + InFile::new( + current_file_id, + match field.ast_id { + FieldAstId::Record(it) => it.upcast(), + FieldAstId::Tuple(it) => it.upcast(), + }, + ), + attrs.cfg().unwrap(), + cfg_options.clone(), + )) } } - VariantData::Tuple(arena) + (VariantData::Tuple(arena), diagnostics) } - Fields::Unit => VariantData::Unit, + Fields::Unit => (VariantData::Unit, diagnostics), } } diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 22f5fb9926..2dc7714bbb 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -27,7 +27,7 @@ use crate::{ macro_id_to_def_id, nameres::DefMap, path::{ModPath, Path}, - src::HasSource, + src::{HasChildSource, HasSource}, AsMacroCall, BlockId, DefWithBodyId, HasModule, LocalModuleId, Lookup, MacroId, ModuleId, UnresolvedMacro, }; @@ -324,6 +324,12 @@ impl Body { let src = s.source(db); (src.file_id, s.module(db), src.value.body()) } + DefWithBodyId::VariantId(v) => { + let e = v.parent.lookup(db); + let src = v.parent.child_source(db); + let variant = &src.value[v.local_id]; + (src.file_id, e.container, variant.expr()) + } }; let expander = Expander::new(db, file_id, module); let (mut body, source_map) = Body::new(db, expander, params, body); diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 3b3297f781..c4f91e49a6 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -29,8 +29,9 @@ use crate::{ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint}, db::DefDatabase, expr::{ - dummy_expr_id, Array, BindingAnnotation, Expr, ExprId, FloatTypeWrapper, Label, LabelId, - Literal, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField, Statement, + dummy_expr_id, Array, BindingAnnotation, ClosureKind, Expr, ExprId, FloatTypeWrapper, 
+ Label, LabelId, Literal, MatchArm, Movability, Pat, PatId, RecordFieldPat, RecordLitField, + Statement, }, intern::Interned, item_scope::BuiltinShadowMode, @@ -97,6 +98,7 @@ pub(super) fn lower( name_to_pat_grouping: Default::default(), is_lowering_inside_or_pat: false, is_lowering_assignee_expr: false, + is_lowering_generator: false, } .collect(params, body) } @@ -111,6 +113,7 @@ struct ExprCollector<'a> { name_to_pat_grouping: FxHashMap>, is_lowering_inside_or_pat: bool, is_lowering_assignee_expr: bool, + is_lowering_generator: bool, } impl ExprCollector<'_> { @@ -358,6 +361,7 @@ impl ExprCollector<'_> { self.alloc_expr(Expr::Return { expr }, syntax_ptr) } ast::Expr::YieldExpr(e) => { + self.is_lowering_generator = true; let expr = e.expr().map(|e| self.collect_expr(e)); self.alloc_expr(Expr::Yield { expr }, syntax_ptr) } @@ -459,13 +463,31 @@ impl ExprCollector<'_> { .ret_type() .and_then(|r| r.ty()) .map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it))); + + let prev_is_lowering_generator = self.is_lowering_generator; + self.is_lowering_generator = false; + let body = self.collect_expr_opt(e.body()); + + let closure_kind = if self.is_lowering_generator { + let movability = if e.static_token().is_some() { + Movability::Static + } else { + Movability::Movable + }; + ClosureKind::Generator(movability) + } else { + ClosureKind::Closure + }; + self.is_lowering_generator = prev_is_lowering_generator; + self.alloc_expr( Expr::Closure { args: args.into(), arg_types: arg_types.into(), ret_type, body, + closure_kind, }, syntax_ptr, ) diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index f2fed95444..162d173d52 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -2,8 +2,10 @@ use std::fmt::{self, Write}; +use syntax::ast::HasName; + use crate::{ - expr::{Array, BindingAnnotation, Literal, Statement}, + expr::{Array, BindingAnnotation, ClosureKind, Literal, Movability, Statement}, pretty::{print_generic_args, print_path, print_type_ref}, type_ref::TypeRef, }; @@ -32,6 +34,16 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo }; format!("const {} = ", name) } + DefWithBodyId::VariantId(it) => { + needs_semi = false; + let src = it.parent.child_source(db); + let variant = &src.value[it.local_id]; + let name = match &variant.name() { + Some(name) => name.to_string(), + None => "_".to_string(), + }; + format!("{}", name) + } }; let mut p = Printer { body, buf: header, indent_level: 0, needs_indent: false }; @@ -350,7 +362,10 @@ impl<'a> Printer<'a> { self.print_expr(*index); w!(self, "]"); } - Expr::Closure { args, arg_types, ret_type, body } => { + Expr::Closure { args, arg_types, ret_type, body, closure_kind } => { + if let ClosureKind::Generator(Movability::Static) = closure_kind { + w!(self, "static "); + } w!(self, "|"); for (i, (pat, ty)) in args.iter().zip(arg_types.iter()).enumerate() { if i != 0 { diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs index 5b1435e8f4..bb13165257 100644 --- a/crates/hir-def/src/child_by_source.rs +++ b/crates/hir-def/src/child_by_source.rs @@ -198,6 +198,10 @@ impl ChildBySource for EnumId { impl ChildBySource for DefWithBodyId { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { let body = db.body(*self); + if let &DefWithBodyId::VariantId(v) = self { + VariantId::EnumVariantId(v).child_by_source_to(db, res, file_id) + } + for (_, def_map) in body.blocks(db) { // All block 
expressions are merged into the same map, because they logically all add // inner items to the containing `DefWithBodyId`. diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 631ae3cf11..2dc69b00ac 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -219,7 +219,7 @@ impl TraitData { pub(crate) fn trait_data_with_diagnostics_query( db: &dyn DefDatabase, tr: TraitId, - ) -> (Arc, Arc>) { + ) -> (Arc, Arc<[DefDiagnostic]>) { let tr_loc @ ItemLoc { container: module_id, id: tree_id } = tr.lookup(db); let item_tree = tree_id.item_tree(db); let tr_def = &item_tree[tree_id.value]; @@ -251,7 +251,7 @@ impl TraitData { visibility, skip_array_during_method_dispatch, }), - Arc::new(diagnostics), + diagnostics.into(), ) } @@ -299,7 +299,7 @@ impl ImplData { pub(crate) fn impl_data_with_diagnostics_query( db: &dyn DefDatabase, id: ImplId, - ) -> (Arc, Arc>) { + ) -> (Arc, Arc<[DefDiagnostic]>) { let _p = profile::span("impl_data_with_diagnostics_query"); let ItemLoc { container: module_id, id: tree_id } = id.lookup(db); @@ -318,7 +318,7 @@ impl ImplData { ( Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls }), - Arc::new(diagnostics), + diagnostics.into(), ) } diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs index 40b2f734b7..431c825549 100644 --- a/crates/hir-def/src/db.rs +++ b/crates/hir-def/src/db.rs @@ -97,24 +97,33 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast { #[salsa::invoke(StructData::struct_data_query)] fn struct_data(&self, id: StructId) -> Arc; + #[salsa::invoke(StructData::struct_data_with_diagnostics_query)] + fn struct_data_with_diagnostics(&self, id: StructId) + -> (Arc, Arc<[DefDiagnostic]>); + #[salsa::invoke(StructData::union_data_query)] fn union_data(&self, id: UnionId) -> Arc; + #[salsa::invoke(StructData::union_data_with_diagnostics_query)] + fn union_data_with_diagnostics(&self, id: UnionId) -> (Arc, Arc<[DefDiagnostic]>); + #[salsa::invoke(EnumData::enum_data_query)] fn enum_data(&self, e: EnumId) -> Arc; + #[salsa::invoke(EnumData::enum_data_with_diagnostics_query)] + fn enum_data_with_diagnostics(&self, e: EnumId) -> (Arc, Arc<[DefDiagnostic]>); + #[salsa::invoke(ImplData::impl_data_query)] fn impl_data(&self, e: ImplId) -> Arc; #[salsa::invoke(ImplData::impl_data_with_diagnostics_query)] - fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc, Arc>); + fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc, Arc<[DefDiagnostic]>); #[salsa::invoke(TraitData::trait_data_query)] fn trait_data(&self, e: TraitId) -> Arc; #[salsa::invoke(TraitData::trait_data_with_diagnostics_query)] - fn trait_data_with_diagnostics(&self, tr: TraitId) - -> (Arc, Arc>); + fn trait_data_with_diagnostics(&self, tr: TraitId) -> (Arc, Arc<[DefDiagnostic]>); #[salsa::invoke(TypeAliasData::type_alias_data_query)] fn type_alias_data(&self, e: TypeAliasId) -> Arc; diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index 419d3feec3..1626465502 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -198,6 +198,7 @@ pub enum Expr { arg_types: Box<[Option>]>, ret_type: Option>, body: ExprId, + closure_kind: ClosureKind, }, Tuple { exprs: Box<[ExprId]>, @@ -211,6 +212,18 @@ pub enum Expr { Underscore, } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ClosureKind { + Closure, + Generator(Movability), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Movability { + Static, + Movable, +} + #[derive(Debug, Clone, Eq, PartialEq)] pub enum Array { 
ElementList { elements: Box<[ExprId]>, is_assignee_expr: bool }, diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index b94b500040..c70e6fdccd 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -333,8 +333,8 @@ fn calculate_best_path( db, def_map, visited_modules, - from, crate_root, + from, info.container, max_len - 1, prefixed, diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 3342d4db4a..570344596d 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -943,6 +943,7 @@ impl AssocItem { pub struct Variant { pub name: Name, pub fields: Fields, + pub ast_id: FileAstId, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -952,10 +953,17 @@ pub enum Fields { Unit, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum FieldAstId { + Record(FileAstId), + Tuple(FileAstId), +} + /// A single field of an enum variant or struct #[derive(Debug, Clone, PartialEq, Eq)] pub struct Field { pub name: Name, pub type_ref: Interned, pub visibility: RawVisibilityId, + pub ast_id: FieldAstId, } diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index 7f2551e941..077a1b619d 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -184,7 +184,8 @@ impl<'a> Ctx<'a> { let name = field.name()?.as_name(); let visibility = self.lower_visibility(field); let type_ref = self.lower_type_ref_opt(field.ty()); - let res = Field { name, type_ref, visibility }; + let ast_id = FieldAstId::Record(self.source_ast_id_map.ast_id(field)); + let res = Field { name, type_ref, visibility, ast_id }; Some(res) } @@ -203,7 +204,8 @@ impl<'a> Ctx<'a> { let name = Name::new_tuple_field(idx); let visibility = self.lower_visibility(field); let type_ref = self.lower_type_ref_opt(field.ty()); - Field { name, type_ref, visibility } + let ast_id = FieldAstId::Tuple(self.source_ast_id_map.ast_id(field)); + Field { name, type_ref, visibility, ast_id } } fn lower_union(&mut self, union: &ast::Union) -> Option> { @@ -247,7 +249,8 @@ impl<'a> Ctx<'a> { fn lower_variant(&mut self, variant: &ast::Variant) -> Option { let name = variant.name()?.as_name(); let fields = self.lower_fields(&variant.kind()); - let res = Variant { name, fields }; + let ast_id = self.source_ast_id_map.ast_id(variant); + let res = Variant { name, fields, ast_id }; Some(res) } diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 34dd817fd1..da1643152c 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -115,7 +115,7 @@ impl<'a> Printer<'a> { w!(self, "{{"); self.indented(|this| { for field in fields.clone() { - let Field { visibility, name, type_ref } = &this.tree[field]; + let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field]; this.print_attrs_of(field); this.print_visibility(*visibility); w!(this, "{}: ", name); @@ -129,7 +129,7 @@ impl<'a> Printer<'a> { w!(self, "("); self.indented(|this| { for field in fields.clone() { - let Field { visibility, name, type_ref } = &this.tree[field]; + let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field]; this.print_attrs_of(field); this.print_visibility(*visibility); w!(this, "{}: ", name); @@ -323,7 +323,7 @@ impl<'a> Printer<'a> { self.print_where_clause_and_opening_brace(generic_params); self.indented(|this| { for variant in variants.clone() { - let Variant { name, fields } = &this.tree[variant]; + let Variant 
{ name, fields, ast_id: _ } = &this.tree[variant]; this.print_attrs_of(variant); w!(this, "{}", name); this.print_fields(fields); diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 32ebfda4fd..5c7aa72349 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -474,16 +474,24 @@ pub enum DefWithBodyId { FunctionId(FunctionId), StaticId(StaticId), ConstId(ConstId), + VariantId(EnumVariantId), } impl_from!(FunctionId, ConstId, StaticId for DefWithBodyId); +impl From for DefWithBodyId { + fn from(id: EnumVariantId) -> Self { + DefWithBodyId::VariantId(id) + } +} + impl DefWithBodyId { pub fn as_generic_def_id(self) -> Option { match self { DefWithBodyId::FunctionId(f) => Some(f.into()), DefWithBodyId::StaticId(_) => None, DefWithBodyId::ConstId(c) => Some(c.into()), + DefWithBodyId::VariantId(c) => Some(c.into()), } } } @@ -681,6 +689,7 @@ impl HasModule for DefWithBodyId { DefWithBodyId::FunctionId(it) => it.lookup(db).module(db), DefWithBodyId::StaticId(it) => it.lookup(db).module(db), DefWithBodyId::ConstId(it) => it.lookup(db).module(db), + DefWithBodyId::VariantId(it) => it.parent.lookup(db).container, } } } @@ -691,6 +700,7 @@ impl DefWithBodyId { DefWithBodyId::FunctionId(it) => it.lookup(db).id.value.into(), DefWithBodyId::StaticId(it) => it.lookup(db).id.value.into(), DefWithBodyId::ConstId(it) => it.lookup(db).id.value.into(), + DefWithBodyId::VariantId(it) => it.parent.lookup(db).id.value.into(), } } } diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 9242b48c59..9ffc218818 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -2122,7 +2122,7 @@ impl ModCollector<'_, '_> { fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) { let ast_id = item.ast_id(self.item_tree); - let ast_id = InFile::new(self.file_id(), ast_id); + let ast_id = InFile::new(self.file_id(), ast_id.upcast()); self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code( self.module_id, ast_id, diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index ed7e920fd2..0661422919 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -4,7 +4,7 @@ use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use hir_expand::MacroCallKind; use la_arena::Idx; -use syntax::ast; +use syntax::ast::{self, AnyHasAttrs}; use crate::{ attr::AttrId, @@ -22,7 +22,7 @@ pub enum DefDiagnosticKind { UnresolvedImport { id: ItemTreeId, index: Idx }, - UnconfiguredCode { ast: AstId, cfg: CfgExpr, opts: CfgOptions }, + UnconfiguredCode { ast: AstId, cfg: CfgExpr, opts: CfgOptions }, UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId }, @@ -75,7 +75,7 @@ impl DefDiagnostic { pub fn unconfigured_code( container: LocalModuleId, - ast: AstId, + ast: AstId, cfg: CfgExpr, opts: CfgOptions, ) -> Self { diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 8aa5973cac..070f683713 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -839,6 +839,7 @@ impl HasResolver for DefWithBodyId { DefWithBodyId::ConstId(c) => c.resolver(db), DefWithBodyId::FunctionId(f) => f.resolver(db), DefWithBodyId::StaticId(s) => s.resolver(db), + DefWithBodyId::VariantId(v) => v.parent.resolver(db), } } } diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs index 11c0a6764e..2b27db0e95 100644 --- 
a/crates/hir-expand/src/ast_id_map.rs +++ b/crates/hir-expand/src/ast_id_map.rs @@ -93,7 +93,12 @@ impl AstIdMap { // trait does not change ids of top-level items, which helps caching. bdfs(node, |it| { let kind = it.kind(); - if ast::Item::can_cast(kind) || ast::BlockExpr::can_cast(kind) { + if ast::Item::can_cast(kind) + || ast::BlockExpr::can_cast(kind) + || ast::Variant::can_cast(kind) + || ast::RecordField::can_cast(kind) + || ast::TupleField::can_cast(kind) + { res.alloc(&it); true } else { diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index fc128102f2..a5b499fe8d 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -811,6 +811,31 @@ impl<'a> InFile<&'a SyntaxNode> { _ => None, } } + + pub fn original_syntax_node(self, db: &dyn db::AstDatabase) -> Option> { + // This kind of upmapping can only be achieved in attribute expanded files, + // as we don't have node inputs otherwise and therefor can't find an `N` node in the input + if !self.file_id.is_macro() { + return Some(self.map(Clone::clone)); + } else if !self.file_id.is_attr_macro(db) { + return None; + } + + if let Some(InFile { file_id, value: (first, last) }) = ascend_node_border_tokens(db, self) + { + if file_id.is_macro() { + let range = first.text_range().cover(last.text_range()); + tracing::error!("Failed mapping out of macro file for {:?}", range); + return None; + } + // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes + let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?; + let kind = self.value.kind(); + let value = anc.ancestors().find(|it| it.kind() == kind)?; + return Some(InFile::new(file_id, value)); + } + None + } } impl InFile { diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 4ce21a5796..2679a1c360 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -263,6 +263,7 @@ pub mod known { Iterator, IntoIterator, Item, + IntoIter, Try, Ok, Future, diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs index e106c4c2cf..78911d8dc0 100644 --- a/crates/hir-ty/src/autoderef.rs +++ b/crates/hir-ty/src/autoderef.rs @@ -123,13 +123,14 @@ fn deref_by_trait(table: &mut InferenceTable<'_>, ty: Ty) -> Option { let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?; let projection = { - let b = TyBuilder::assoc_type_projection(db, target); + let b = TyBuilder::subst_for_def(db, deref_trait, None); if b.remaining() != 1 { // the Target type + Deref trait should only have one generic parameter, // namely Deref's Self type return None; } - b.push(ty).build() + let deref_subst = b.push(ty).build(); + TyBuilder::assoc_type_projection(db, target, Some(deref_subst)).build() }; // Check that the type implements Deref at all diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs index 94d7806cb6..9ae752556d 100644 --- a/crates/hir-ty/src/builder.rs +++ b/crates/hir-ty/src/builder.rs @@ -6,19 +6,19 @@ use chalk_ir::{ cast::{Cast, CastTo, Caster}, fold::TypeFoldable, interner::HasInterner, - AdtId, BoundVar, DebruijnIndex, Scalar, + AdtId, DebruijnIndex, Scalar, }; use hir_def::{ - builtin_type::BuiltinType, generics::TypeOrConstParamData, ConstParamId, GenericDefId, TraitId, - TypeAliasId, + builtin_type::BuiltinType, generics::TypeOrConstParamData, ConstParamId, DefWithBodyId, + GenericDefId, TraitId, TypeAliasId, }; use smallvec::SmallVec; use crate::{ consteval::unknown_const_as_generic, 
db::HirDatabase, infer::unify::InferenceTable, primitive, - to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, CallableSig, ConstData, - ConstValue, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution, TraitRef, Ty, - TyDefId, TyExt, TyKind, ValueTyDefId, + to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, BoundVar, CallableSig, + GenericArg, Interner, ProjectionTy, Substitution, TraitRef, Ty, TyDefId, TyExt, TyKind, + ValueTyDefId, }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -34,17 +34,32 @@ pub struct TyBuilder { data: D, vec: SmallVec<[GenericArg; 2]>, param_kinds: SmallVec<[ParamKind; 2]>, + parent_subst: Substitution, } impl TyBuilder { fn with_data(self, data: B) -> TyBuilder { - TyBuilder { data, param_kinds: self.param_kinds, vec: self.vec } + TyBuilder { + data, + vec: self.vec, + param_kinds: self.param_kinds, + parent_subst: self.parent_subst, + } } } impl TyBuilder { - fn new(data: D, param_kinds: SmallVec<[ParamKind; 2]>) -> TyBuilder { - TyBuilder { data, vec: SmallVec::with_capacity(param_kinds.len()), param_kinds } + fn new( + data: D, + param_kinds: SmallVec<[ParamKind; 2]>, + parent_subst: Option, + ) -> Self { + let parent_subst = parent_subst.unwrap_or_else(|| Substitution::empty(Interner)); + Self { data, vec: SmallVec::with_capacity(param_kinds.len()), param_kinds, parent_subst } + } + + fn new_empty(data: D) -> Self { + TyBuilder::new(data, SmallVec::new(), None) } fn build_internal(self) -> (D, Substitution) { @@ -52,13 +67,18 @@ impl TyBuilder { for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) { self.assert_match_kind(a, e); } - let subst = Substitution::from_iter(Interner, self.vec); + let subst = Substitution::from_iter( + Interner, + self.vec.into_iter().chain(self.parent_subst.iter(Interner).cloned()), + ); (self.data, subst) } pub fn push(mut self, arg: impl CastTo) -> Self { + assert!(self.remaining() > 0); let arg = arg.cast(Interner); let expected_kind = &self.param_kinds[self.vec.len()]; + let arg_kind = match arg.data(Interner) { chalk_ir::GenericArgData::Ty(_) => ParamKind::Type, chalk_ir::GenericArgData::Lifetime(_) => panic!("Got lifetime in TyBuilder::push"), @@ -68,7 +88,9 @@ impl TyBuilder { } }; assert_eq!(*expected_kind, arg_kind); + self.vec.push(arg); + self } @@ -79,20 +101,12 @@ impl TyBuilder { pub fn fill_with_bound_vars(self, debruijn: DebruijnIndex, starting_from: usize) -> Self { // self.fill is inlined to make borrow checker happy let mut this = self; - let other = this.param_kinds.iter().skip(this.vec.len()); + let other = &this.param_kinds[this.vec.len()..]; let filler = (starting_from..).zip(other).map(|(idx, kind)| match kind { - ParamKind::Type => { - GenericArgData::Ty(TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner)) - .intern(Interner) + ParamKind::Type => BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner), + ParamKind::Const(ty) => { + BoundVar::new(debruijn, idx).to_const(Interner, ty.clone()).cast(Interner) } - ParamKind::Const(ty) => GenericArgData::Const( - ConstData { - value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)), - ty: ty.clone(), - } - .intern(Interner), - ) - .intern(Interner), }); this.vec.extend(filler.take(this.remaining()).casted(Interner)); assert_eq!(this.remaining(), 0); @@ -102,8 +116,8 @@ impl TyBuilder { pub fn fill_with_unknown(self) -> Self { // self.fill is inlined to make borrow checker happy let mut this = self; - let filler = this.param_kinds.iter().skip(this.vec.len()).map(|x| match x { - ParamKind::Type => 
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner), + let filler = this.param_kinds[this.vec.len()..].iter().map(|x| match x { + ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner), ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), }); this.vec.extend(filler.casted(Interner)); @@ -113,33 +127,17 @@ impl TyBuilder { pub(crate) fn fill_with_inference_vars(self, table: &mut InferenceTable<'_>) -> Self { self.fill(|x| match x { - ParamKind::Type => GenericArgData::Ty(table.new_type_var()).intern(Interner), - ParamKind::Const(ty) => { - GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner) - } + ParamKind::Type => table.new_type_var().cast(Interner), + ParamKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner), }) } pub fn fill(mut self, filler: impl FnMut(&ParamKind) -> GenericArg) -> Self { - self.vec.extend(self.param_kinds.iter().skip(self.vec.len()).map(filler)); + self.vec.extend(self.param_kinds[self.vec.len()..].iter().map(filler)); assert_eq!(self.remaining(), 0); self } - pub fn use_parent_substs(mut self, parent_substs: &Substitution) -> Self { - assert!(self.vec.is_empty()); - assert!(parent_substs.len(Interner) <= self.param_kinds.len()); - self.extend(parent_substs.iter(Interner).cloned()); - self - } - - fn extend(&mut self, it: impl Iterator + Clone) { - for x in it.clone().zip(self.param_kinds.iter().skip(self.vec.len())) { - self.assert_match_kind(&x.0, &x.1); - } - self.vec.extend(it); - } - fn assert_match_kind(&self, a: &chalk_ir::GenericArg, e: &ParamKind) { match (a.data(Interner), e) { (chalk_ir::GenericArgData::Ty(_), ParamKind::Type) @@ -188,21 +186,42 @@ impl TyBuilder<()> { params.placeholder_subst(db) } - pub fn subst_for_def(db: &dyn HirDatabase, def: impl Into) -> TyBuilder<()> { - let def = def.into(); - let params = generics(db.upcast(), def); - TyBuilder::new( - (), - params - .iter() - .map(|(id, data)| match data { - TypeOrConstParamData::TypeParamData(_) => ParamKind::Type, - TypeOrConstParamData::ConstParamData(_) => { - ParamKind::Const(db.const_param_ty(ConstParamId::from_unchecked(id))) - } - }) - .collect(), - ) + pub fn subst_for_def( + db: &dyn HirDatabase, + def: impl Into, + parent_subst: Option, + ) -> TyBuilder<()> { + let generics = generics(db.upcast(), def.into()); + assert!(generics.parent_generics().is_some() == parent_subst.is_some()); + let params = generics + .iter_self() + .map(|(id, data)| match data { + TypeOrConstParamData::TypeParamData(_) => ParamKind::Type, + TypeOrConstParamData::ConstParamData(_) => { + ParamKind::Const(db.const_param_ty(ConstParamId::from_unchecked(id))) + } + }) + .collect(); + TyBuilder::new((), params, parent_subst) + } + + /// Creates a `TyBuilder` to build `Substitution` for a generator defined in `parent`. + /// + /// A generator's substitution consists of: + /// - resume type of generator + /// - yield type of generator ([`Generator::Yield`](std::ops::Generator::Yield)) + /// - return type of generator ([`Generator::Return`](std::ops::Generator::Return)) + /// - generic parameters in scope on `parent` + /// in this order. + /// + /// This method prepopulates the builder with placeholder substitution of `parent`, so you + /// should only push exactly 3 `GenericArg`s before building. 
+ pub fn subst_for_generator(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> { + let parent_subst = + parent.as_generic_def_id().map(|p| generics(db.upcast(), p).placeholder_subst(db)); + // These represent resume type, yield type, and return type of generator. + let params = std::iter::repeat(ParamKind::Type).take(3).collect(); + TyBuilder::new((), params, parent_subst) } pub fn build(self) -> Substitution { @@ -213,7 +232,7 @@ impl TyBuilder<()> { impl TyBuilder { pub fn adt(db: &dyn HirDatabase, def: hir_def::AdtId) -> TyBuilder { - TyBuilder::subst_for_def(db, def).with_data(def) + TyBuilder::subst_for_def(db, def, None).with_data(def) } pub fn fill_with_defaults( @@ -221,16 +240,27 @@ impl TyBuilder { db: &dyn HirDatabase, mut fallback: impl FnMut() -> Ty, ) -> Self { + // Note that we're building ADT, so we never have parent generic parameters. let defaults = db.generic_defaults(self.data.into()); + let dummy_ty = TyKind::Error.intern(Interner).cast(Interner); for default_ty in defaults.iter().skip(self.vec.len()) { - if let GenericArgData::Ty(x) = default_ty.skip_binders().data(Interner) { + // NOTE(skip_binders): we only check if the arg type is error type. + if let Some(x) = default_ty.skip_binders().ty(Interner) { if x.is_unknown() { self.vec.push(fallback().cast(Interner)); continue; } - }; - // each default can depend on the previous parameters - let subst_so_far = Substitution::from_iter(Interner, self.vec.clone()); + } + // Each default can only depend on the previous parameters. + // FIXME: we don't handle const generics here. + let subst_so_far = Substitution::from_iter( + Interner, + self.vec + .iter() + .cloned() + .chain(iter::repeat(dummy_ty.clone())) + .take(self.param_kinds.len()), + ); self.vec.push(default_ty.clone().substitute(Interner, &subst_so_far).cast(Interner)); } self @@ -245,7 +275,7 @@ impl TyBuilder { pub struct Tuple(usize); impl TyBuilder { pub fn tuple(size: usize) -> TyBuilder { - TyBuilder::new(Tuple(size), iter::repeat(ParamKind::Type).take(size).collect()) + TyBuilder::new(Tuple(size), iter::repeat(ParamKind::Type).take(size).collect(), None) } pub fn build(self) -> Ty { @@ -256,7 +286,7 @@ impl TyBuilder { impl TyBuilder { pub fn trait_ref(db: &dyn HirDatabase, def: TraitId) -> TyBuilder { - TyBuilder::subst_for_def(db, def).with_data(def) + TyBuilder::subst_for_def(db, def, None).with_data(def) } pub fn build(self) -> TraitRef { @@ -266,8 +296,12 @@ impl TyBuilder { } impl TyBuilder { - pub fn assoc_type_projection(db: &dyn HirDatabase, def: TypeAliasId) -> TyBuilder { - TyBuilder::subst_for_def(db, def).with_data(def) + pub fn assoc_type_projection( + db: &dyn HirDatabase, + def: TypeAliasId, + parent_subst: Option, + ) -> TyBuilder { + TyBuilder::subst_for_def(db, def, parent_subst).with_data(def) } pub fn build(self) -> ProjectionTy { @@ -277,19 +311,6 @@ impl TyBuilder { } impl + TypeFoldable> TyBuilder> { - fn subst_binders(b: Binders) -> Self { - let param_kinds = b - .binders - .iter(Interner) - .map(|x| match x { - chalk_ir::VariableKind::Ty(_) => ParamKind::Type, - chalk_ir::VariableKind::Lifetime => panic!("Got lifetime parameter"), - chalk_ir::VariableKind::Const(ty) => ParamKind::Const(ty.clone()), - }) - .collect(); - TyBuilder::new(b, param_kinds) - } - pub fn build(self) -> T { let (b, subst) = self.build_internal(); b.substitute(Interner, &subst) @@ -297,15 +318,41 @@ impl + TypeFoldable> TyBuilder> { - pub fn def_ty(db: &dyn HirDatabase, def: TyDefId) -> TyBuilder> { - TyBuilder::subst_binders(db.ty(def)) + pub fn 
def_ty( + db: &dyn HirDatabase, + def: TyDefId, + parent_subst: Option, + ) -> TyBuilder> { + let poly_ty = db.ty(def); + let id: GenericDefId = match def { + TyDefId::BuiltinType(_) => { + assert!(parent_subst.is_none()); + return TyBuilder::new_empty(poly_ty); + } + TyDefId::AdtId(id) => id.into(), + TyDefId::TypeAliasId(id) => id.into(), + }; + TyBuilder::subst_for_def(db, id, parent_subst).with_data(poly_ty) } pub fn impl_self_ty(db: &dyn HirDatabase, def: hir_def::ImplId) -> TyBuilder> { - TyBuilder::subst_binders(db.impl_self_ty(def)) + TyBuilder::subst_for_def(db, def, None).with_data(db.impl_self_ty(def)) } - pub fn value_ty(db: &dyn HirDatabase, def: ValueTyDefId) -> TyBuilder> { - TyBuilder::subst_binders(db.value_ty(def)) + pub fn value_ty( + db: &dyn HirDatabase, + def: ValueTyDefId, + parent_subst: Option, + ) -> TyBuilder> { + let poly_value_ty = db.value_ty(def); + let id = match def.to_generic_def_id() { + Some(id) => id, + None => { + // static items + assert!(parent_subst.is_none()); + return TyBuilder::new_empty(poly_value_ty); + } + }; + TyBuilder::subst_for_def(db, id, parent_subst).with_data(poly_value_ty) } } diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index faec99c7d3..3f3f8f7d0f 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -11,6 +11,7 @@ use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait}; use base_db::CrateId; use hir_def::{ + expr::Movability, lang_item::{lang_attr, LangItemTarget}, AssocItemId, GenericDefId, HasModule, ItemContainerId, Lookup, ModuleId, TypeAliasId, }; @@ -26,9 +27,9 @@ use crate::{ to_assoc_type_id, to_chalk_trait_id, traits::ChalkContext, utils::generics, - AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId, Interner, ProjectionTy, - ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, - TyExt, TyKind, WhereClause, + wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId, + Interner, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, + TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause, }; pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum; @@ -372,17 +373,62 @@ impl<'a> chalk_solve::RustIrDatabase for ChalkContext<'a> { } fn generator_datum( &self, - _: chalk_ir::GeneratorId, + id: chalk_ir::GeneratorId, ) -> std::sync::Arc> { - // FIXME - unimplemented!() + let (parent, expr) = self.db.lookup_intern_generator(id.into()); + + // We fill substitution with unknown type, because we only need to know whether the generic + // params are types or consts to build `Binders` and those being filled up are for + // `resume_type`, `yield_type`, and `return_type` of the generator in question. 
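For orientation, a surface-level generator matching the layout just described (a nightly-only fragment, `generators` feature assumed; it mirrors the test fixtures added later in this patch): the parameter supplies the resume type, `yield` the yield type, and the trailing expression the return type.

    // resume type: usize, yield type: i64, return type: &'static str
    let mut g = |r: usize| {
        let resumed: usize = yield 0i64; // a `yield` expression evaluates to the resume type
        "done"
    };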
+ let subst = TyBuilder::subst_for_generator(self.db, parent).fill_with_unknown().build(); + + let input_output = rust_ir::GeneratorInputOutputDatum { + resume_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)) + .intern(Interner), + yield_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 1)) + .intern(Interner), + return_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 2)) + .intern(Interner), + // FIXME: calculate upvars + upvars: vec![], + }; + + let it = subst + .iter(Interner) + .map(|it| it.constant(Interner).map(|c| c.data(Interner).ty.clone())); + let input_output = crate::make_type_and_const_binders(it, input_output); + + let movability = match self.db.body(parent)[expr] { + hir_def::expr::Expr::Closure { + closure_kind: hir_def::expr::ClosureKind::Generator(movability), + .. + } => movability, + _ => unreachable!("non generator expression interned as generator"), + }; + let movability = match movability { + Movability::Static => rust_ir::Movability::Static, + Movability::Movable => rust_ir::Movability::Movable, + }; + + Arc::new(rust_ir::GeneratorDatum { movability, input_output }) } fn generator_witness_datum( &self, - _: chalk_ir::GeneratorId, + id: chalk_ir::GeneratorId, ) -> std::sync::Arc> { - // FIXME - unimplemented!() + // FIXME: calculate inner types + let inner_types = + rust_ir::GeneratorWitnessExistential { types: wrap_empty_binders(vec![]) }; + + let (parent, _) = self.db.lookup_intern_generator(id.into()); + // See the comment in `generator_datum()` for unknown types. + let subst = TyBuilder::subst_for_generator(self.db, parent).fill_with_unknown().build(); + let it = subst + .iter(Interner) + .map(|it| it.constant(Interner).map(|c| c.data(Interner).ty.clone())); + let inner_types = crate::make_type_and_const_binders(it, inner_types); + + Arc::new(rust_ir::GeneratorWitnessDatum { inner_types }) } fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase { @@ -429,10 +475,15 @@ pub(crate) fn associated_ty_data_query( let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast()); let ctx = crate::TyLoweringContext::new(db, &resolver) .with_type_param_mode(crate::lower::ParamLoweringMode::Variable); - let pro_ty = TyBuilder::assoc_type_projection(db, type_alias) + + let trait_subst = TyBuilder::subst_for_def(db, trait_, None) + .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, generic_params.len_self()) + .build(); + let pro_ty = TyBuilder::assoc_type_projection(db, type_alias, Some(trait_subst)) .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0) .build(); let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner); + let mut bounds: Vec<_> = type_alias_data .bounds .iter() diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs index 4a5533c648..4f0e9dbf1e 100644 --- a/crates/hir-ty/src/chalk_ext.rs +++ b/crates/hir-ty/src/chalk_ext.rs @@ -152,7 +152,7 @@ impl TyExt for Ty { TyKind::FnDef(def, parameters) => { let callable_def = db.lookup_intern_callable_def((*def).into()); let sig = db.callable_item_signature(callable_def); - Some(sig.substitute(Interner, ¶meters)) + Some(sig.substitute(Interner, parameters)) } TyKind::Closure(.., substs) => { let sig_param = substs.at(Interner, 0).assert_ty_ref(Interner); @@ -166,6 +166,8 @@ impl TyExt for Ty { let trait_ref = match self.kind(Interner) { // The principal trait bound should be the first element of the bounds. This is an // invariant ensured by `TyLoweringContext::lower_dyn_trait()`. 
+ // FIXME: dyn types may not have principal trait and we don't want to return auto trait + // here. TyKind::Dyn(dyn_ty) => dyn_ty.bounds.skip_binders().interned().get(0).and_then(|b| { match b.skip_binders() { WhereClause::Implemented(trait_ref) => Some(trait_ref), diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs index 6ecb6e6fd1..2c0c6e0b83 100644 --- a/crates/hir-ty/src/consteval.rs +++ b/crates/hir-ty/src/consteval.rs @@ -7,14 +7,17 @@ use std::{ use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData, IntTy, Scalar}; use hir_def::{ + builtin_type::BuiltinInt, expr::{ArithOp, BinaryOp, Expr, ExprId, Literal, Pat, PatId}, path::ModPath, resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs}, + src::HasChildSource, type_ref::ConstScalar, - ConstId, DefWithBodyId, + ConstId, DefWithBodyId, EnumVariantId, Lookup, }; -use la_arena::{Arena, Idx}; +use la_arena::{Arena, Idx, RawIdx}; use stdx::never; +use syntax::ast::HasName; use crate::{ db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx, @@ -77,6 +80,7 @@ pub enum ConstEvalError { #[derive(Debug, Clone, PartialEq, Eq)] pub enum ComputedExpr { Literal(Literal), + Enum(String, EnumVariantId, Literal), Tuple(Box<[ComputedExpr]>), } @@ -104,6 +108,7 @@ impl Display for ComputedExpr { Literal::String(x) => std::fmt::Debug::fmt(x, f), Literal::ByteString(x) => std::fmt::Debug::fmt(x, f), }, + ComputedExpr::Enum(name, _, _) => name.fmt(f), ComputedExpr::Tuple(t) => { f.write_char('(')?; for x in &**t { @@ -148,13 +153,51 @@ fn is_valid(scalar: &Scalar, value: i128) -> bool { } } +fn get_name(ctx: &mut ConstEvalCtx<'_>, variant: EnumVariantId) -> String { + let loc = variant.parent.lookup(ctx.db.upcast()); + let children = variant.parent.child_source(ctx.db.upcast()); + let item_tree = loc.id.item_tree(ctx.db.upcast()); + + let variant_name = children.value[variant.local_id].name(); + let enum_name = item_tree[loc.id.value].name.to_string(); + enum_name + "::" + &variant_name.unwrap().to_string() +} + pub fn eval_const( expr_id: ExprId, ctx: &mut ConstEvalCtx<'_>, ) -> Result { + let u128_to_i128 = |it: u128| -> Result { + it.try_into().map_err(|_| ConstEvalError::NotSupported("u128 is too big")) + }; + let expr = &ctx.exprs[expr_id]; match expr { - Expr::Missing => Err(ConstEvalError::IncompleteExpr), + Expr::Missing => match ctx.owner { + // evaluate the implicit variant index of an enum variant without expression + // FIXME: This should return the type of the enum representation + DefWithBodyId::VariantId(variant) => { + let prev_idx: u32 = variant.local_id.into_raw().into(); + let prev_idx = prev_idx.checked_sub(1).map(RawIdx::from).map(Idx::from_raw); + let value = match prev_idx { + Some(local_id) => { + let prev_variant = EnumVariantId { local_id, parent: variant.parent }; + 1 + match ctx.db.const_eval_variant(prev_variant)? 
{ + ComputedExpr::Literal(Literal::Int(v, _)) => v, + ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?, + _ => { + return Err(ConstEvalError::NotSupported( + "Enum can't contain this kind of value", + )) + } + } + } + _ => 0, + }; + Ok(ComputedExpr::Literal(Literal::Int(value, Some(BuiltinInt::I128)))) + } + _ => Err(ConstEvalError::IncompleteExpr), + }, Expr::Literal(l) => Ok(ComputedExpr::Literal(l.clone())), &Expr::UnaryOp { expr, op } => { let ty = &ctx.expr_ty(expr); @@ -167,9 +210,7 @@ pub fn eval_const( return Ok(ComputedExpr::Literal(Literal::Bool(!b))) } ComputedExpr::Literal(Literal::Int(v, _)) => v, - ComputedExpr::Literal(Literal::Uint(v, _)) => v - .try_into() - .map_err(|_| ConstEvalError::NotSupported("too big u128"))?, + ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?, _ => return Err(ConstEvalError::NotSupported("this kind of operator")), }; let r = match ty.kind(Interner) { @@ -198,9 +239,7 @@ pub fn eval_const( hir_def::expr::UnaryOp::Neg => { let v = match ev { ComputedExpr::Literal(Literal::Int(v, _)) => v, - ComputedExpr::Literal(Literal::Uint(v, _)) => v - .try_into() - .map_err(|_| ConstEvalError::NotSupported("too big u128"))?, + ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?, _ => return Err(ConstEvalError::NotSupported("this kind of operator")), }; Ok(ComputedExpr::Literal(Literal::Int( @@ -219,16 +258,12 @@ pub fn eval_const( let op = op.ok_or(ConstEvalError::IncompleteExpr)?; let v1 = match lhs { ComputedExpr::Literal(Literal::Int(v, _)) => v, - ComputedExpr::Literal(Literal::Uint(v, _)) => { - v.try_into().map_err(|_| ConstEvalError::NotSupported("too big u128"))? - } + ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?, _ => return Err(ConstEvalError::NotSupported("this kind of operator")), }; let v2 = match rhs { ComputedExpr::Literal(Literal::Int(v, _)) => v, - ComputedExpr::Literal(Literal::Uint(v, _)) => { - v.try_into().map_err(|_| ConstEvalError::NotSupported("too big u128"))? - } + ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?, _ => return Err(ConstEvalError::NotSupported("this kind of operator")), }; match op { @@ -339,9 +374,22 @@ pub fn eval_const( ValueNs::GenericParam(_) => { Err(ConstEvalError::NotSupported("const generic without substitution")) } + ValueNs::EnumVariantId(id) => match ctx.db.const_eval_variant(id)? { + ComputedExpr::Literal(lit) => { + Ok(ComputedExpr::Enum(get_name(ctx, id), id, lit)) + } + _ => Err(ConstEvalError::NotSupported( + "Enums can't evalute to anything but numbers", + )), + }, _ => Err(ConstEvalError::NotSupported("path that are not const or local")), } } + // FIXME: Handle the cast target + &Expr::Cast { expr, .. } => match eval_const(expr, ctx)? 
{ + ComputedExpr::Enum(_, _, lit) => Ok(ComputedExpr::Literal(lit)), + _ => Err(ConstEvalError::NotSupported("Can't cast these types")), + }, _ => Err(ConstEvalError::NotSupported("This kind of expression")), } } @@ -412,7 +460,15 @@ pub(crate) fn const_eval_recover( Err(ConstEvalError::Loop) } -pub(crate) fn const_eval_query( +pub(crate) fn const_eval_variant_recover( + _: &dyn HirDatabase, + _: &[String], + _: &EnumVariantId, +) -> Result { + Err(ConstEvalError::Loop) +} + +pub(crate) fn const_eval_variant_query( db: &dyn HirDatabase, const_id: ConstId, ) -> Result { @@ -433,6 +489,26 @@ pub(crate) fn const_eval_query( result } +pub(crate) fn const_eval_query_variant( + db: &dyn HirDatabase, + variant_id: EnumVariantId, +) -> Result { + let def = variant_id.into(); + let body = db.body(def); + let infer = &db.infer(def); + eval_const( + body.body_expr, + &mut ConstEvalCtx { + db, + owner: def, + exprs: &body.exprs, + pats: &body.pats, + local_data: HashMap::default(), + infer, + }, + ) +} + pub(crate) fn eval_to_const<'a>( expr: Idx, mode: ParamLoweringMode, diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 4a052851af..b76506f6eb 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -87,6 +87,49 @@ fn consts() { ); } +#[test] +fn enums() { + check_number( + r#" + enum E { + F1 = 1, + F2 = 2 * E::F1 as u8, + F3 = 3 * E::F2 as u8, + } + const GOAL: i32 = E::F3 as u8; + "#, + 6, + ); + check_number( + r#" + enum E { F1 = 1, F2, } + const GOAL: i32 = E::F2 as u8; + "#, + 2, + ); + check_number( + r#" + enum E { F1, } + const GOAL: i32 = E::F1 as u8; + "#, + 0, + ); + let r = eval_goal( + r#" + enum E { A = 1, } + const GOAL: E = E::A; + "#, + ) + .unwrap(); + match r { + ComputedExpr::Enum(name, _, Literal::Uint(val, _)) => { + assert_eq!(name, "E::A"); + assert_eq!(val, 1); + } + x => panic!("Expected enum but found {:?}", x), + } +} + #[test] fn const_loop() { check_fail( diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 69283e55a4..932fce8356 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -6,8 +6,8 @@ use std::sync::Arc; use arrayvec::ArrayVec; use base_db::{impl_intern_key, salsa, CrateId, Upcast}; use hir_def::{ - db::DefDatabase, expr::ExprId, BlockId, ConstId, ConstParamId, DefWithBodyId, FunctionId, - GenericDefId, ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId, + db::DefDatabase, expr::ExprId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, + FunctionId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId, }; use la_arena::ArenaMap; @@ -43,10 +43,14 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::invoke(crate::lower::const_param_ty_query)] fn const_param_ty(&self, def: ConstParamId) -> Ty; - #[salsa::invoke(crate::consteval::const_eval_query)] + #[salsa::invoke(crate::consteval::const_eval_variant_query)] #[salsa::cycle(crate::consteval::const_eval_recover)] fn const_eval(&self, def: ConstId) -> Result; + #[salsa::invoke(crate::consteval::const_eval_query_variant)] + #[salsa::cycle(crate::consteval::const_eval_variant_recover)] + fn const_eval_variant(&self, def: EnumVariantId) -> Result; + #[salsa::invoke(crate::lower::impl_trait_query)] fn impl_trait(&self, def: ImplId) -> Option>; @@ -116,6 +120,8 @@ pub trait HirDatabase: DefDatabase + Upcast { fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId; #[salsa::interned] fn intern_closure(&self, id: (DefWithBodyId, 
ExprId)) -> InternedClosureId; + #[salsa::interned] + fn intern_generator(&self, id: (DefWithBodyId, ExprId)) -> InternedGeneratorId; #[salsa::invoke(chalk_db::associated_ty_data_query)] fn associated_ty_data(&self, id: chalk_db::AssocTypeId) -> Arc; @@ -188,6 +194,9 @@ fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc DefWithBodyId::ConstId(it) => { db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string() } + DefWithBodyId::VariantId(it) => { + db.enum_data(it.parent).variants[it.local_id].name.to_string() + } }); db.infer_query(def) } @@ -226,6 +235,10 @@ impl_intern_key!(InternedOpaqueTyId); pub struct InternedClosureId(salsa::InternId); impl_intern_key!(InternedClosureId); +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct InternedGeneratorId(salsa::InternId); +impl_intern_key!(InternedGeneratorId); + /// This exists just for Chalk, because Chalk just has a single `FnDefId` where /// we have different IDs for struct and enum variant constructors. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs index 161b19a739..431ab949b4 100644 --- a/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -18,7 +18,9 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec { let is_unsafe = match def { DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(), - DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) => false, + DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) | DefWithBodyId::VariantId(_) => { + false + } }; if is_unsafe { return res; diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 874abdaea8..7f0baf49da 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -20,13 +20,14 @@ use hir_def::{ }; use hir_expand::{hygiene::Hygiene, name::Name}; use itertools::Itertools; +use smallvec::SmallVec; use syntax::SmolStr; use crate::{ db::HirDatabase, from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, lt_from_placeholder_idx, mapping::from_chalk, - primitive, subst_prefix, to_assoc_type_id, + primitive, to_assoc_type_id, utils::{self, generics}, AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstValue, DomainGoal, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives, Mutability, @@ -221,6 +222,7 @@ pub enum DisplaySourceCodeError { PathNotFound, UnknownType, Closure, + Generator, } pub enum HirDisplayError { @@ -504,8 +506,15 @@ impl HirDisplay for Ty { let total_len = parent_params + self_param + type_params + const_params; // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self? if total_len > 0 { + // `parameters` are in the order of fn's params (including impl traits), + // parent's params (those from enclosing impl or trait, if any). 
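The own-parameters-before-parent-parameters layout referred to here (and relied on throughout this patch) can be illustrated with a hypothetical item; the names below are made up for illustration only:

    struct Foo<T>(T);
    impl<T> Foo<T> {
        // The substitution for `bar` is laid out as [U, T]: the method's own
        // parameter first, then the enclosing impl's parameter.
        fn bar<U>(&self, _u: U) {}
    }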
+ let parameters = parameters.as_slice(Interner); + let fn_params_len = self_param + type_params + const_params; + let fn_params = parameters.get(..fn_params_len); + let parent_params = parameters.get(parameters.len() - parent_params..); + let params = parent_params.into_iter().chain(fn_params).flatten(); write!(f, "<")?; - f.write_joined(¶meters.as_slice(Interner)[..total_len], ", ")?; + f.write_joined(params, ", ")?; write!(f, ">")?; } } @@ -577,9 +586,8 @@ impl HirDisplay for Ty { Some(x) => x, None => return true, }; - let actual_default = default_parameter - .clone() - .substitute(Interner, &subst_prefix(parameters, i)); + let actual_default = + default_parameter.clone().substitute(Interner, ¶meters); parameter != &actual_default } let mut default_from = 0; @@ -783,7 +791,34 @@ impl HirDisplay for Ty { write!(f, "{{unknown}}")?; } TyKind::InferenceVar(..) => write!(f, "_")?, - TyKind::Generator(..) => write!(f, "{{generator}}")?, + TyKind::Generator(_, subst) => { + if f.display_target.is_source_code() { + return Err(HirDisplayError::DisplaySourceCodeError( + DisplaySourceCodeError::Generator, + )); + } + + let subst = subst.as_slice(Interner); + let a: Option> = subst + .get(subst.len() - 3..) + .map(|args| args.iter().map(|arg| arg.ty(Interner)).collect()) + .flatten(); + + if let Some([resume_ty, yield_ty, ret_ty]) = a.as_deref() { + write!(f, "|")?; + resume_ty.hir_fmt(f)?; + write!(f, "|")?; + + write!(f, " yields ")?; + yield_ty.hir_fmt(f)?; + + write!(f, " -> ")?; + ret_ty.hir_fmt(f)?; + } else { + // This *should* be unreachable, but fallback just in case. + write!(f, "{{generator}}")?; + } + } TyKind::GeneratorWitness(..) => write!(f, "{{generator witness}}")?, } Ok(()) diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 9dbeba4f9f..0efff651cc 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -19,14 +19,15 @@ use std::sync::Arc; use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags}; use hir_def::{ body::Body, + builtin_type::BuiltinType, data::{ConstData, StaticData}, expr::{BindingAnnotation, ExprId, PatId}, lang_item::LangItemTarget, path::{path, Path}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, type_ref::TypeRef, - AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, Lookup, - TraitId, TypeAliasId, VariantId, + AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, + ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId, }; use hir_expand::name::{name, Name}; use itertools::Either; @@ -67,6 +68,12 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc ctx.collect_const(&db.const_data(c)), DefWithBodyId::FunctionId(f) => ctx.collect_fn(f), DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)), + DefWithBodyId::VariantId(v) => { + ctx.return_ty = TyBuilder::builtin(match db.enum_data(v.parent).variant_body_type() { + Either::Left(builtin) => BuiltinType::Int(builtin), + Either::Right(builtin) => BuiltinType::Uint(builtin), + }); + } } ctx.infer_body(); @@ -332,7 +339,7 @@ pub struct InferenceResult { /// unresolved or missing subpatterns or subpatterns of mismatched types. pub type_of_pat: ArenaMap, type_mismatches: FxHashMap, - /// Interned Unknown to return references to. + /// Interned common types to return references to. standard_types: InternedStandardTypes, /// Stores the types which were implicitly dereferenced in pattern binding modes. 
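With the generator rendering added to `display.rs` above, the last three arguments of a generator's substitution are shown as, for example (taken from the inference test expectations added later in this patch):

    // resume type | yields yield type -> return type
    |usize| yields i64 -> &str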
pub pat_adjustments: FxHashMap>, @@ -412,6 +419,8 @@ pub(crate) struct InferenceContext<'a> { /// closures, but currently this is the only field that will change there, /// so it doesn't make sense. return_ty: Ty, + /// The resume type and the yield type, respectively, of the generator being inferred. + resume_yield_tys: Option<(Ty, Ty)>, diverges: Diverges, breakables: Vec, } @@ -476,6 +485,7 @@ impl<'a> InferenceContext<'a> { table: unify::InferenceTable::new(db, trait_env.clone()), trait_env, return_ty: TyKind::Error.intern(Interner), // set in collect_fn_signature + resume_yield_tys: None, db, owner, body, @@ -703,6 +713,8 @@ impl<'a> InferenceContext<'a> { &mut self, inner_ty: Ty, assoc_ty: Option, + // FIXME(GATs): these are args for the trait ref, args for assoc type itself should be + // handled when we support them. params: &[GenericArg], ) -> Ty { match assoc_ty { @@ -794,7 +806,18 @@ impl<'a> InferenceContext<'a> { self.resolve_variant_on_alias(ty, unresolved, path) } TypeNs::TypeAliasId(it) => { - let ty = TyBuilder::def_ty(self.db, it.into()) + let container = it.lookup(self.db.upcast()).container; + let parent_subst = match container { + ItemContainerId::TraitId(id) => { + let subst = TyBuilder::subst_for_def(self.db, id, None) + .fill_with_inference_vars(&mut self.table) + .build(); + Some(subst) + } + // Type aliases do not exist in impls. + _ => None, + }; + let ty = TyBuilder::def_ty(self.db, it.into(), parent_subst) .fill_with_inference_vars(&mut self.table) .build(); self.resolve_variant_on_alias(ty, unresolved, path) @@ -873,6 +896,12 @@ impl<'a> InferenceContext<'a> { fn resolve_into_iter_item(&self) -> Option { let path = path![core::iter::IntoIterator]; let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; + self.db.trait_data(trait_).associated_type_by_name(&name![IntoIter]) + } + + fn resolve_iterator_item(&self) -> Option { + let path = path![core::iter::Iterator]; + let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; self.db.trait_data(trait_).associated_type_by_name(&name![Item]) } diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index 3ead929098..094e460dbf 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -12,6 +12,7 @@ use crate::{ use super::{Expectation, InferenceContext}; impl InferenceContext<'_> { + // This function handles both closures and generators. pub(super) fn deduce_closure_type_from_expectations( &mut self, closure_expr: ExprId, @@ -27,6 +28,11 @@ impl InferenceContext<'_> { // Deduction from where-clauses in scope, as well as fn-pointer coercion are handled here. let _ = self.coerce(Some(closure_expr), closure_ty, &expected_ty); + // Generators are not Fn* so return early. + if matches!(closure_ty.kind(Interner), TyKind::Generator(..)) { + return; + } + // Deduction based on the expected `dyn Fn` is done separately. 
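The new `resolve_iterator_item` helper is used together with `resolve_into_iter_item` when inferring `for` loops (see the `Expr::For` hunk in `infer/expr.rs` below): the pattern type is now found via `<_ as IntoIterator>::IntoIter` and then `<_ as Iterator>::Item`, rather than reading an `Item` off `IntoIterator` directly. In surface Rust terms:

    fn sum(v: Vec<u32>) -> u32 {
        let mut total = 0;
        // <Vec<u32> as IntoIterator>::IntoIter = std::vec::IntoIter<u32>
        // <std::vec::IntoIter<u32> as Iterator>::Item = u32
        for x in v {
            total += x; // x: u32
        }
        total
    }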
if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) { if let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) { diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 2d04a864a2..f56108b26c 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -10,7 +10,10 @@ use chalk_ir::{ cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind, }; use hir_def::{ - expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, LabelId, Literal, Statement, UnaryOp}, + expr::{ + ArithOp, Array, BinaryOp, ClosureKind, CmpOp, Expr, ExprId, LabelId, Literal, Statement, + UnaryOp, + }, generics::TypeOrConstParamData, path::{GenericArg, GenericArgs}, resolver::resolver_for_expr, @@ -204,8 +207,10 @@ impl<'a> InferenceContext<'a> { } &Expr::For { iterable, body, pat, label } => { let iterable_ty = self.infer_expr(iterable, &Expectation::none()); - let pat_ty = + let into_iter_ty = self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item()); + let pat_ty = + self.resolve_associated_type(into_iter_ty, self.resolve_iterator_item()); self.infer_pat(pat, &pat_ty, BindingMode::default()); self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| { @@ -216,7 +221,7 @@ impl<'a> InferenceContext<'a> { self.diverges = Diverges::Maybe; TyBuilder::unit() } - Expr::Closure { body, args, ret_type, arg_types } => { + Expr::Closure { body, args, ret_type, arg_types, closure_kind } => { assert_eq!(args.len(), arg_types.len()); let mut sig_tys = Vec::new(); @@ -244,20 +249,40 @@ impl<'a> InferenceContext<'a> { ), }) .intern(Interner); - let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into(); - let closure_ty = - TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone())) - .intern(Interner); + + let (ty, resume_yield_tys) = if matches!(closure_kind, ClosureKind::Generator(_)) { + // FIXME: report error when there are more than 1 parameter. + let resume_ty = match sig_tys.first() { + // When `sig_tys.len() == 1` the first type is the return type, not the + // first parameter type. + Some(ty) if sig_tys.len() > 1 => ty.clone(), + _ => self.result.standard_types.unit.clone(), + }; + let yield_ty = self.table.new_type_var(); + + let subst = TyBuilder::subst_for_generator(self.db, self.owner) + .push(resume_ty.clone()) + .push(yield_ty.clone()) + .push(ret_ty.clone()) + .build(); + + let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into(); + let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner); + + (generator_ty, Some((resume_ty, yield_ty))) + } else { + let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into(); + let closure_ty = + TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone())) + .intern(Interner); + + (closure_ty, None) + }; // Eagerly try to relate the closure type with the expected // type, otherwise we often won't have enough information to // infer the body. 
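To make the generator branch of the closure inference above concrete: with no parameter the resume type defaults to `()`, and a bare `yield` coerces `()` into the yield type, so a pattern like `let () = yield;` type-checks. This is what the `generator_resume_yield_return_unit` test added below exercises (nightly-only fragment):

    let mut g = || {
        // The `yield` expression evaluates to the resume type, which is `()` here.
        let () = yield;
    };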
- self.deduce_closure_type_from_expectations( - tgt_expr, - &closure_ty, - &sig_ty, - expected, - ); + self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected); // Now go through the argument patterns for (arg_pat, arg_ty) in args.iter().zip(sig_tys) { @@ -266,6 +291,8 @@ impl<'a> InferenceContext<'a> { let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone()); + let prev_resume_yield_tys = + mem::replace(&mut self.resume_yield_tys, resume_yield_tys); self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| { this.infer_expr_coerce(*body, &Expectation::has_type(ret_ty)); @@ -273,8 +300,9 @@ impl<'a> InferenceContext<'a> { self.diverges = prev_diverges; self.return_ty = prev_ret_ty; + self.resume_yield_tys = prev_resume_yield_tys; - closure_ty + ty } Expr::Call { callee, args, .. } => { let callee_ty = self.infer_expr(*callee, &Expectation::none()); @@ -423,11 +451,18 @@ impl<'a> InferenceContext<'a> { TyKind::Never.intern(Interner) } Expr::Yield { expr } => { - // FIXME: track yield type for coercion - if let Some(expr) = expr { - self.infer_expr(*expr, &Expectation::none()); + if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() { + if let Some(expr) = expr { + self.infer_expr_coerce(*expr, &Expectation::has_type(yield_ty)); + } else { + let unit = self.result.standard_types.unit.clone(); + let _ = self.coerce(Some(tgt_expr), &unit, &yield_ty); + } + resume_ty + } else { + // FIXME: report error (yield expr in non-generator) + TyKind::Error.intern(Interner) } - TyKind::Never.intern(Interner) } Expr::RecordLit { path, fields, spread, .. } => { let (ty, def_id) = self.resolve_variant(path.as_deref(), false); @@ -952,11 +987,13 @@ impl<'a> InferenceContext<'a> { let lhs_ty = self.infer_expr(lhs, &lhs_expectation); let rhs_ty = self.table.new_type_var(); - let func = lang_names_for_bin_op(op).and_then(|(name, lang_item)| { - self.db.trait_data(self.resolve_lang_item(lang_item)?.as_trait()?).method_by_name(&name) + let trait_func = lang_names_for_bin_op(op).and_then(|(name, lang_item)| { + let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?; + let func = self.db.trait_data(trait_id).method_by_name(&name)?; + Some((trait_id, func)) }); - let func = match func { - Some(func) => func, + let (trait_, func) = match trait_func { + Some(it) => it, None => { let rhs_ty = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone()); let rhs_ty = self.infer_expr_coerce(rhs, &Expectation::from_option(rhs_ty)); @@ -966,7 +1003,9 @@ impl<'a> InferenceContext<'a> { } }; - let subst = TyBuilder::subst_for_def(self.db, func) + // HACK: We can use this substitution for the function because the function itself doesn't + // have its own generic parameters. 
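The note above works because the operator methods carry no generic parameters of their own, so the substitution built for the operator trait (self type, then `Rhs`) can double as the method's substitution. In plain Rust terms:

    use std::ops::Add;

    // `x + y` resolves to `<T as Add<U>>::add(x, y)`; the trait substitution is
    // [T, U], and `add` itself adds no further generic parameters.
    fn plus<T: Add<U>, U>(x: T, y: U) -> T::Output {
        x + y
    }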
+ let subst = TyBuilder::subst_for_def(self.db, trait_, None) .push(lhs_ty.clone()) .push(rhs_ty.clone()) .build(); @@ -1245,19 +1284,7 @@ impl<'a> InferenceContext<'a> { assert_eq!(self_params, 0); // method shouldn't have another Self param let total_len = parent_params + type_params + const_params + impl_trait_params; let mut substs = Vec::with_capacity(total_len); - // Parent arguments are unknown - for (id, param) in def_generics.iter_parent() { - match param { - TypeOrConstParamData::TypeParamData(_) => { - substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner)); - } - TypeOrConstParamData::ConstParamData(_) => { - let ty = self.db.const_param_ty(ConstParamId::from_unchecked(id)); - substs - .push(GenericArgData::Const(self.table.new_const_var(ty)).intern(Interner)); - } - } - } + // handle provided arguments if let Some(generic_args) = generic_args { // if args are provided, it should be all of them, but we can't rely on that @@ -1266,7 +1293,7 @@ impl<'a> InferenceContext<'a> { .iter() .filter(|arg| !matches!(arg, GenericArg::Lifetime(_))) .take(type_params + const_params) - .zip(def_generics.iter_id().skip(parent_params)) + .zip(def_generics.iter_id()) { if let Some(g) = generic_arg_to_chalk( self.db, @@ -1290,6 +1317,9 @@ impl<'a> InferenceContext<'a> { } } }; + + // Handle everything else as unknown. This also handles generic arguments for the method's + // parent (impl or trait), which should come after those for the method. for (id, data) in def_generics.iter().skip(substs.len()) { match data { TypeOrConstParamData::TypeParamData(_) => { @@ -1327,9 +1357,13 @@ impl<'a> InferenceContext<'a> { CallableDefId::FunctionId(f) => { if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container { // construct a TraitRef - let substs = crate::subst_prefix( - &*parameters, - generics(self.db.upcast(), trait_.into()).len(), + let params_len = parameters.len(Interner); + let trait_params_len = generics(self.db.upcast(), trait_.into()).len(); + let substs = Substitution::from_iter( + Interner, + // The generic parameters for the trait come after those for the + // function. 
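Concretely, explicitly supplied generic arguments are matched against the method's own parameters only; the parent's parameters occupy the tail of the substitution and are left to inference. A hypothetical example:

    struct Wrapper<T>(T);
    impl<T> Wrapper<T> {
        fn convert<U: From<T>>(self) -> U {
            U::from(self.0)
        }
    }

    // In `w.convert::<u64>()`, `u64` fills `U` (the method's own parameter);
    // `T = u32` comes after it in the substitution and is found by inference.
    fn demo(w: Wrapper<u32>) -> u64 {
        w.convert::<u64>()
    }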
+ ¶meters.as_slice(Interner)[params_len - trait_params_len..], ); self.push_obligation( TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: substs } diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index f580e09e91..7a4754cdc7 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -12,8 +12,8 @@ use crate::{ builder::ParamKind, consteval, method_resolution::{self, VisibleFromModule}, - GenericArgData, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, - ValueTyDefId, + utils::generics, + Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, ValueTyDefId, }; use super::{ExprOrPatId, InferenceContext, TraitRef}; @@ -96,17 +96,21 @@ impl<'a> InferenceContext<'a> { ValueNs::GenericParam(it) => return Some(self.db.const_param_ty(it)), }; - let parent_substs = self_subst.unwrap_or_else(|| Substitution::empty(Interner)); let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); let substs = ctx.substs_from_path(path, typable, true); - let mut it = substs.as_slice(Interner)[parent_substs.len(Interner)..].iter().cloned(); - let ty = TyBuilder::value_ty(self.db, typable) - .use_parent_substs(&parent_substs) + let substs = substs.as_slice(Interner); + let parent_substs = self_subst.or_else(|| { + let generics = generics(self.db.upcast(), typable.to_generic_def_id()?); + let parent_params_len = generics.parent_generics()?.len(); + let parent_args = &substs[substs.len() - parent_params_len..]; + Some(Substitution::from_iter(Interner, parent_args)) + }); + let parent_substs_len = parent_substs.as_ref().map_or(0, |s| s.len(Interner)); + let mut it = substs.iter().take(substs.len() - parent_substs_len).cloned(); + let ty = TyBuilder::value_ty(self.db, typable, parent_substs) .fill(|x| { it.next().unwrap_or_else(|| match x { - ParamKind::Type => { - GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner) - } + ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner), ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()), }) }) @@ -249,7 +253,7 @@ impl<'a> InferenceContext<'a> { }; let substs = match container { ItemContainerId::ImplId(impl_id) => { - let impl_substs = TyBuilder::subst_for_def(self.db, impl_id) + let impl_substs = TyBuilder::subst_for_def(self.db, impl_id, None) .fill_with_inference_vars(&mut self.table) .build(); let impl_self_ty = diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index e77b55670b..6ccd0b215c 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -598,11 +598,14 @@ impl<'a> InferenceTable<'a> { .build(); let projection = { - let b = TyBuilder::assoc_type_projection(self.db, output_assoc_type); + let b = TyBuilder::subst_for_def(self.db, fn_once_trait, None); if b.remaining() != 2 { return None; } - b.push(ty.clone()).push(arg_ty).build() + let fn_once_subst = b.push(ty.clone()).push(arg_ty).build(); + + TyBuilder::assoc_type_projection(self.db, output_assoc_type, Some(fn_once_subst)) + .build() }; let trait_env = self.trait_env.env.clone(); diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 532544fee5..223d705b15 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -306,7 +306,7 @@ impl<'a> TyLoweringContext<'a> { // FIXME we're probably doing something wrong here self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16); let ( - parent_params, + _parent_params, self_params, list_params, const_params, 
@@ -319,7 +319,7 @@ impl<'a> TyLoweringContext<'a> { }; TyKind::BoundVar(BoundVar::new( self.in_binders, - idx as usize + parent_params + self_params + list_params + const_params, + idx as usize + self_params + list_params + const_params, )) .intern(Interner) } @@ -499,14 +499,31 @@ impl<'a> TyLoweringContext<'a> { .intern(Interner) } TypeNs::SelfType(impl_id) => { - let generics = generics(self.db.upcast(), impl_id.into()); - let substs = match self.type_param_mode { - ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db), - ParamLoweringMode::Variable => { - generics.bound_vars_subst(self.db, self.in_binders) + let def = + self.resolver.generic_def().expect("impl should have generic param scope"); + let generics = generics(self.db.upcast(), def); + + match self.type_param_mode { + ParamLoweringMode::Placeholder => { + // `def` can be either impl itself or item within, and we need impl itself + // now. + let generics = generics.parent_generics().unwrap_or(&generics); + let subst = generics.placeholder_subst(self.db); + self.db.impl_self_ty(impl_id).substitute(Interner, &subst) } - }; - self.db.impl_self_ty(impl_id).substitute(Interner, &substs) + ParamLoweringMode::Variable => { + let starting_from = match def { + GenericDefId::ImplId(_) => 0, + // `def` is an item within impl. We need to substitute `BoundVar`s but + // remember that they are for parent (i.e. impl) generic params so they + // come after our own params. + _ => generics.len_self(), + }; + TyBuilder::impl_self_ty(self.db, impl_id) + .fill_with_bound_vars(self.in_binders, starting_from) + .build() + } + } } TypeNs::AdtSelfType(adt) => { let generics = generics(self.db.upcast(), adt.into()); @@ -663,40 +680,31 @@ impl<'a> TyLoweringContext<'a> { fn substs_from_path_segment( &self, segment: PathSegment<'_>, - def_generic: Option, + def: Option, infer_args: bool, explicit_self_ty: Option, ) -> Substitution { + // Remember that the item's own generic args come before its parent's. let mut substs = Vec::new(); - let def_generics = if let Some(def) = def_generic { - generics(self.db.upcast(), def) + let def = if let Some(d) = def { + d } else { return Substitution::empty(Interner); }; + let def_generics = generics(self.db.upcast(), def); let (parent_params, self_params, type_params, const_params, impl_trait_params) = def_generics.provenance_split(); - let total_len = - parent_params + self_params + type_params + const_params + impl_trait_params; + let item_len = self_params + type_params + const_params + impl_trait_params; + let total_len = parent_params + item_len; - let ty_error = GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner); + let ty_error = TyKind::Error.intern(Interner).cast(Interner); let mut def_generic_iter = def_generics.iter_id(); - for _ in 0..parent_params { - if let Some(eid) = def_generic_iter.next() { - match eid { - Either::Left(_) => substs.push(ty_error.clone()), - Either::Right(x) => { - substs.push(unknown_const_as_generic(self.db.const_param_ty(x))) - } - } - } - } - let fill_self_params = || { for x in explicit_self_ty .into_iter() - .map(|x| GenericArgData::Ty(x).intern(Interner)) + .map(|x| x.cast(Interner)) .chain(iter::repeat(ty_error.clone())) .take(self_params) { @@ -757,37 +765,40 @@ impl<'a> TyLoweringContext<'a> { fill_self_params(); } + // These params include those of parent. 
+ let remaining_params: SmallVec<[_; 2]> = def_generic_iter + .map(|eid| match eid { + Either::Left(_) => ty_error.clone(), + Either::Right(x) => unknown_const_as_generic(self.db.const_param_ty(x)), + }) + .collect(); + assert_eq!(remaining_params.len() + substs.len(), total_len); + // handle defaults. In expression or pattern path segments without // explicitly specified type arguments, missing type arguments are inferred // (i.e. defaults aren't used). if !infer_args || had_explicit_args { - if let Some(def_generic) = def_generic { - let defaults = self.db.generic_defaults(def_generic); - assert_eq!(total_len, defaults.len()); + let defaults = self.db.generic_defaults(def); + assert_eq!(total_len, defaults.len()); + let parent_from = item_len - substs.len(); - for default_ty in defaults.iter().skip(substs.len()) { - // each default can depend on the previous parameters - let substs_so_far = Substitution::from_iter(Interner, substs.clone()); - if let Some(_id) = def_generic_iter.next() { - substs.push(default_ty.clone().substitute(Interner, &substs_so_far)); - } - } + for (idx, default_ty) in defaults[substs.len()..item_len].iter().enumerate() { + // each default can depend on the previous parameters + let substs_so_far = Substitution::from_iter( + Interner, + substs.iter().cloned().chain(remaining_params[idx..].iter().cloned()), + ); + substs.push(default_ty.clone().substitute(Interner, &substs_so_far)); } + + // Keep parent's params as unknown. + let mut remaining_params = remaining_params; + substs.extend(remaining_params.drain(parent_from..)); + } else { + substs.extend(remaining_params); } - // add placeholders for args that were not provided - // FIXME: emit diagnostics in contexts where this is not allowed - for eid in def_generic_iter { - match eid { - Either::Left(_) => substs.push(ty_error.clone()), - Either::Right(x) => { - substs.push(unknown_const_as_generic(self.db.const_param_ty(x))) - } - } - } - // If this assert fails, it means you pushed into subst but didn't call .next() of def_generic_iter assert_eq!(substs.len(), total_len); - Substitution::from_iter(Interner, substs) } @@ -981,10 +992,11 @@ impl<'a> TyLoweringContext<'a> { fn lower_dyn_trait(&self, bounds: &[Interned]) -> Ty { let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner); - // INVARIANT: The principal trait bound must come first. Others may be in any order but - // should be in the same order for the same set but possibly different order of bounds in - // the input. - // This invariant is used by `TyExt::dyn_trait()` and chalk. + // INVARIANT: The principal trait bound, if present, must come first. Others may be in any + // order but should be in the same order for the same set but possibly different order of + // bounds in the input. + // INVARIANT: If this function returns `DynTy`, there should be at least one trait bound. + // These invariants are utilized by `TyExt::dyn_trait()` and chalk. let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { let mut bounds: Vec<_> = bounds .iter() @@ -1035,6 +1047,12 @@ impl<'a> TyLoweringContext<'a> { return None; } + if bounds.first().and_then(|b| b.trait_id()).is_none() { + // When there's no trait bound, that's an error. This happens when the trait refs + // are unresolved. + return None; + } + // As multiple occurrences of the same auto traits *are* permitted, we dedulicate the // bounds. We shouldn't have repeated elements besides auto traits at this point. 
bounds.dedup(); @@ -1046,7 +1064,8 @@ impl<'a> TyLoweringContext<'a> { let bounds = crate::make_single_type_binders(bounds); TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner) } else { - // FIXME: report error (additional non-auto traits or associated type rebound) + // FIXME: report error + // (additional non-auto traits, associated type rebound, or no resolved trait) TyKind::Error.intern(Interner) } } @@ -1139,11 +1158,28 @@ fn named_associated_type_shorthand_candidates( }; match res { - TypeNs::SelfType(impl_id) => search( + TypeNs::SelfType(impl_id) => { // we're _in_ the impl -- the binders get added back later. Correct, // but it would be nice to make this more explicit - db.impl_trait(impl_id)?.into_value_and_skipped_binders().0, - ), + let trait_ref = db.impl_trait(impl_id)?.into_value_and_skipped_binders().0; + + let impl_id_as_generic_def: GenericDefId = impl_id.into(); + if impl_id_as_generic_def != def { + // `trait_ref` contains `BoundVar`s bound by impl's `Binders`, but here we need + // `BoundVar`s from `def`'s point of view. + // FIXME: A `HirDatabase` query may be handy if this process is needed in more + // places. It'd be almost identical as `impl_trait_query` where `resolver` would be + // of `def` instead of `impl_id`. + let starting_idx = generics(db.upcast(), def).len_self(); + let subst = TyBuilder::subst_for_def(db, impl_id, None) + .fill_with_bound_vars(DebruijnIndex::INNERMOST, starting_idx) + .build(); + let trait_ref = subst.apply(trait_ref, Interner); + search(trait_ref) + } else { + search(trait_ref) + } + } TypeNs::GenericParam(param_id) => { let predicates = db.generic_predicates_for_param(def, param_id.into(), assoc_name); let res = predicates.iter().find_map(|pred| match pred.skip_binders().skip_binders() { @@ -1160,10 +1196,18 @@ fn named_associated_type_shorthand_candidates( } // Handle `Self::Type` referring to own associated type in trait definitions if let GenericDefId::TraitId(trait_id) = param_id.parent() { - let generics = generics(db.upcast(), trait_id.into()); - if generics.params.type_or_consts[param_id.local_id()].is_trait_self() { + let trait_generics = generics(db.upcast(), trait_id.into()); + if trait_generics.params.type_or_consts[param_id.local_id()].is_trait_self() { + let def_generics = generics(db.upcast(), def); + let starting_idx = match def { + GenericDefId::TraitId(_) => 0, + // `def` is an item within trait. We need to substitute `BoundVar`s but + // remember that they are for parent (i.e. trait) generic params so they + // come after our own params. + _ => def_generics.len_self(), + }; let trait_ref = TyBuilder::trait_ref(db, trait_id) - .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0) + .fill_with_bound_vars(DebruijnIndex::INNERMOST, starting_idx) .build(); return search(trait_ref); } @@ -1405,6 +1449,7 @@ pub(crate) fn generic_defaults_query( let ctx = TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable); let generic_params = generics(db.upcast(), def); + let parent_start_idx = generic_params.len_self(); let defaults = generic_params .iter() @@ -1417,19 +1462,17 @@ pub(crate) fn generic_defaults_query( let val = unknown_const_as_generic( db.const_param_ty(ConstParamId::from_unchecked(id)), ); - return crate::make_binders_with_count(db, idx, &generic_params, val); + return make_binders(db, &generic_params, val); } }; let mut ty = p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t)); // Each default can only refer to previous parameters. 
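This matches the language rule that a type parameter default may only mention parameters declared before it, for example:

    // OK: `U`'s default refers to the earlier parameter `T`.
    struct Pair<T, U = T>(T, U);

    // Rejected by rustc: a default cannot refer to a parameter that comes later.
    // struct Bad<T = U, U>(T, U);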
- // type variable default referring to parameter coming - // after it. This is forbidden (FIXME: report - // diagnostic) - ty = fallback_bound_vars(ty, idx); - let val = GenericArgData::Ty(ty).intern(Interner); - crate::make_binders_with_count(db, idx, &generic_params, val) + // Type variable default referring to parameter coming + // after it is forbidden (FIXME: report diagnostic) + ty = fallback_bound_vars(ty, idx, parent_start_idx); + crate::make_binders(db, &generic_params, ty.cast(Interner)) }) .collect(); @@ -1446,15 +1489,14 @@ pub(crate) fn generic_defaults_recover( // we still need one default per parameter let defaults = generic_params .iter_id() - .enumerate() - .map(|(count, id)| { + .map(|id| { let val = match id { itertools::Either::Left(_) => { GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner) } itertools::Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)), }; - crate::make_binders_with_count(db, count, &generic_params, val) + crate::make_binders(db, &generic_params, val) }) .collect(); @@ -1633,6 +1675,19 @@ pub enum ValueTyDefId { } impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId); +impl ValueTyDefId { + pub(crate) fn to_generic_def_id(self) -> Option { + match self { + Self::FunctionId(id) => Some(id.into()), + Self::StructId(id) => Some(id.into()), + Self::UnionId(id) => Some(id.into()), + Self::EnumVariantId(var) => Some(var.into()), + Self::ConstId(id) => Some(id.into()), + Self::StaticId(_) => None, + } + } +} + /// Build the declared type of an item. This depends on the namespace; e.g. for /// `struct Foo(usize)`, we have two types: The type of the struct itself, and /// the constructor function `(usize) -> Foo` which lives in the values @@ -1816,26 +1871,48 @@ pub(crate) fn const_or_path_to_chalk( } } -/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past -/// num_vars_to_keep) by `TyKind::Unknown`. +/// Replaces any 'free' `BoundVar`s in `s` by `TyKind::Error` from the perspective of generic +/// parameter whose index is `param_index`. A `BoundVar` is free when it is or (syntactically) +/// appears after the generic parameter of `param_index`. fn fallback_bound_vars + HasInterner>( s: T, - num_vars_to_keep: usize, + param_index: usize, + parent_start: usize, ) -> T { + // Keep in mind that parent generic parameters, if any, come *after* those of the item in + // question. In the diagrams below, `c*` and `p*` represent generic parameters of the item and + // its parent respectively. + let is_allowed = |index| { + if param_index < parent_start { + // The parameter of `param_index` is one from the item in question. Any parent generic + // parameters or the item's generic parameters that come before `param_index` is + // allowed. + // [c1, .., cj, .., ck, p1, .., pl] where cj is `param_index` + // ^^^^^^ ^^^^^^^^^^ these are allowed + !(param_index..parent_start).contains(&index) + } else { + // The parameter of `param_index` is one from the parent generics. Only parent generic + // parameters that come before `param_index` are allowed. 
+ // [c1, .., ck, p1, .., pj, .., pl] where pj is `param_index` + // ^^^^^^ these are allowed + (parent_start..param_index).contains(&index) + } + }; + crate::fold_free_vars( s, |bound, binders| { - if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST { - TyKind::Error.intern(Interner) - } else { + if bound.index_if_innermost().map_or(true, is_allowed) { bound.shifted_in_from(binders).to_ty(Interner) + } else { + TyKind::Error.intern(Interner) } }, |ty, bound, binders| { - if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST { - unknown_const(ty.clone()) - } else { + if bound.index_if_innermost().map_or(true, is_allowed) { bound.shifted_in_from(binders).to_const(Interner, ty) + } else { + unknown_const(ty.clone()) } }, ) diff --git a/crates/hir-ty/src/mapping.rs b/crates/hir-ty/src/mapping.rs index d765fee0e1..f80fb39c1f 100644 --- a/crates/hir-ty/src/mapping.rs +++ b/crates/hir-ty/src/mapping.rs @@ -103,6 +103,18 @@ impl From for chalk_ir::ClosureId { } } +impl From> for crate::db::InternedGeneratorId { + fn from(id: chalk_ir::GeneratorId) -> Self { + Self::from_intern_id(id.0) + } +} + +impl From for chalk_ir::GeneratorId { + fn from(id: crate::db::InternedGeneratorId) -> Self { + chalk_ir::GeneratorId(id.as_intern_id()) + } +} + pub fn to_foreign_def_id(id: TypeAliasId) -> ForeignDefId { chalk_ir::ForeignDefId(salsa::InternKey::as_intern_id(&id)) } diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index cc21990d55..3a1a3f4fde 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -654,7 +654,7 @@ fn find_matching_impl( let r = table.run_in_snapshot(|table| { let impl_data = db.impl_data(impl_); let substs = - TyBuilder::subst_for_def(db, impl_).fill_with_inference_vars(table).build(); + TyBuilder::subst_for_def(db, impl_, None).fill_with_inference_vars(table).build(); let impl_ty = db.impl_self_ty(impl_).substitute(Interner, &substs); table @@ -1147,10 +1147,9 @@ fn is_valid_candidate( })); if let ItemContainerId::ImplId(impl_id) = c.lookup(db.upcast()).container { let self_ty_matches = table.run_in_snapshot(|table| { - let subst = - TyBuilder::subst_for_def(db, c).fill_with_inference_vars(table).build(); - let expected_self_ty = - subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner); + let expected_self_ty = TyBuilder::impl_self_ty(db, impl_id) + .fill_with_inference_vars(table) + .build(); table.unify(&expected_self_ty, &self_ty) }); if !self_ty_matches { @@ -1186,31 +1185,26 @@ fn is_valid_fn_candidate( table.run_in_snapshot(|table| { let container = fn_id.lookup(db.upcast()).container; - let impl_subst = match container { + let (impl_subst, expect_self_ty) = match container { ItemContainerId::ImplId(it) => { - TyBuilder::subst_for_def(db, it).fill_with_inference_vars(table).build() + let subst = + TyBuilder::subst_for_def(db, it, None).fill_with_inference_vars(table).build(); + let self_ty = db.impl_self_ty(it).substitute(Interner, &subst); + (subst, self_ty) } ItemContainerId::TraitId(it) => { - TyBuilder::subst_for_def(db, it).fill_with_inference_vars(table).build() + let subst = + TyBuilder::subst_for_def(db, it, None).fill_with_inference_vars(table).build(); + let self_ty = subst.at(Interner, 0).assert_ty_ref(Interner).clone(); + (subst, self_ty) } _ => unreachable!(), }; - let fn_subst = TyBuilder::subst_for_def(db, fn_id) - .use_parent_substs(&impl_subst) + let fn_subst = TyBuilder::subst_for_def(db, fn_id, 
Some(impl_subst.clone())) .fill_with_inference_vars(table) .build(); - let expect_self_ty = match container { - ItemContainerId::TraitId(_) => fn_subst.at(Interner, 0).assert_ty_ref(Interner).clone(), - ItemContainerId::ImplId(impl_id) => { - fn_subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner) - } - // We should only get called for associated items (impl/trait) - ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => { - unreachable!() - } - }; check_that!(table.unify(&expect_self_ty, self_ty)); if let Some(receiver_ty) = receiver_ty { diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index d2f13e4351..ebbc541014 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -16,7 +16,7 @@ use base_db::{fixture::WithFixture, FileRange, SourceDatabaseExt}; use expect_test::Expect; use hir_def::{ body::{Body, BodySourceMap, SyntheticSyntax}, - db::DefDatabase, + db::{DefDatabase, InternDatabase}, expr::{ExprId, PatId}, item_scope::ItemScope, nameres::DefMap, @@ -135,6 +135,10 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour let loc = it.lookup(&db); loc.source(&db).value.syntax().text_range().start() } + DefWithBodyId::VariantId(it) => { + let loc = db.lookup_intern_enum(it.parent); + loc.source(&db).value.syntax().text_range().start() + } }); let mut unexpected_type_mismatches = String::new(); for def in defs { @@ -388,6 +392,10 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { let loc = it.lookup(&db); loc.source(&db).value.syntax().text_range().start() } + DefWithBodyId::VariantId(it) => { + let loc = db.lookup_intern_enum(it.parent); + loc.source(&db).value.syntax().text_range().start() + } }); for def in defs { let (_body, source_map) = db.body_with_source_map(def); @@ -453,6 +461,18 @@ fn visit_module( let body = db.body(def); visit_body(db, &body, cb); } + ModuleDefId::AdtId(hir_def::AdtId::EnumId(it)) => { + db.enum_data(it) + .variants + .iter() + .map(|(id, _)| hir_def::EnumVariantId { parent: it, local_id: id }) + .for_each(|it| { + let def = it.into(); + cb(def); + let body = db.body(def); + visit_body(db, &body, cb); + }); + } ModuleDefId::TraitId(it) => { let trait_data = db.trait_data(it); for &(_, item) in trait_data.items.iter() { diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs index bf59fadc2c..d301595bcd 100644 --- a/crates/hir-ty/src/tests/coercion.rs +++ b/crates/hir-ty/src/tests/coercion.rs @@ -294,6 +294,24 @@ fn foo() { ); } +#[test] +fn generator_yield_return_coerce() { + check_no_mismatches( + r#" +fn test() { + let g = || { + yield &1u32; + yield &&1u32; + if true { + return &1u32; + } + &&1u32 + }; +} + "#, + ); +} + #[test] fn assign_coerce() { check_no_mismatches( diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index 23e51a9c16..a155adcec6 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -1488,7 +1488,6 @@ fn regression_11688_4() { #[test] fn gat_crash_1() { - cov_mark::check!(ignore_gats); check_no_mismatches( r#" trait ATrait {} @@ -1527,30 +1526,22 @@ unsafe impl Storage for InlineStorage { #[test] fn gat_crash_3() { - // FIXME: This test currently crashes rust analyzer in a debug build but not in a - // release build (i.e. for the user). With the assumption that tests will always be run - // in debug mode, we catch the unwind and expect that it panicked. 
See the - // [`crate::utils::generics`] function for more information. - cov_mark::check!(ignore_gats); - std::panic::catch_unwind(|| { - check_no_mismatches( - r#" + check_no_mismatches( + r#" trait Collection { - type Item; - type Member: Collection; - fn add(&mut self, value: Self::Item) -> Result<(), Self::Error>; +type Item; +type Member: Collection; +fn add(&mut self, value: Self::Item) -> Result<(), Self::Error>; } struct ConstGen { - data: [T; N], +data: [T; N], } impl Collection for ConstGen { - type Item = T; - type Member = ConstGen; +type Item = T; +type Member = ConstGen; } - "#, - ); - }) - .expect_err("must panic"); + "#, + ); } #[test] @@ -1691,3 +1682,28 @@ fn macrostmts() -> u8 { "#, ); } + +#[test] +fn dyn_with_unresolved_trait() { + check_types( + r#" +fn foo(a: &dyn DoesNotExist) { + a.bar(); + //^&{unknown} +} + "#, + ); +} + +#[test] +fn self_assoc_with_const_generics_crash() { + check_no_mismatches( + r#" +trait Trait { type Item; } +impl Trait for [T; N] { + type Item = (); + fn f(_: Self::Item) {} +} + "#, + ); +} diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 4ea103e5d9..080e2ac1b8 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -1693,16 +1693,16 @@ fn infer_type_param() { fn infer_const() { check_infer( r#" - struct Foo; - impl Foo { const ASSOC_CONST: u32 = 0; } - const GLOBAL_CONST: u32 = 101; - fn test() { - const LOCAL_CONST: u32 = 99; - let x = LOCAL_CONST; - let z = GLOBAL_CONST; - let id = Foo::ASSOC_CONST; - } - "#, +struct Foo; +impl Foo { const ASSOC_CONST: u32 = 0; } +const GLOBAL_CONST: u32 = 101; +fn test() { + const LOCAL_CONST: u32 = 99; + let x = LOCAL_CONST; + let z = GLOBAL_CONST; + let id = Foo::ASSOC_CONST; +} +"#, expect![[r#" 48..49 '0': u32 79..82 '101': u32 @@ -1722,17 +1722,17 @@ fn infer_const() { fn infer_static() { check_infer( r#" - static GLOBAL_STATIC: u32 = 101; - static mut GLOBAL_STATIC_MUT: u32 = 101; - fn test() { - static LOCAL_STATIC: u32 = 99; - static mut LOCAL_STATIC_MUT: u32 = 99; - let x = LOCAL_STATIC; - let y = LOCAL_STATIC_MUT; - let z = GLOBAL_STATIC; - let w = GLOBAL_STATIC_MUT; - } - "#, +static GLOBAL_STATIC: u32 = 101; +static mut GLOBAL_STATIC_MUT: u32 = 101; +fn test() { + static LOCAL_STATIC: u32 = 99; + static mut LOCAL_STATIC_MUT: u32 = 99; + let x = LOCAL_STATIC; + let y = LOCAL_STATIC_MUT; + let z = GLOBAL_STATIC; + let w = GLOBAL_STATIC_MUT; +} +"#, expect![[r#" 28..31 '101': u32 69..72 '101': u32 @@ -1751,6 +1751,41 @@ fn infer_static() { ); } +#[test] +fn infer_enum_variant() { + check_infer( + r#" +enum Foo { + A = 15, + B = Foo::A as isize + 1 +} +"#, + expect![[r#" + 19..21 '15': isize + 31..37 'Foo::A': Foo + 31..46 'Foo::A as isize': isize + 31..50 'Foo::A...ze + 1': isize + 49..50 '1': isize + "#]], + ); + check_infer( + r#" +#[repr(u32)] +enum Foo { + A = 15, + B = Foo::A as u32 + 1 +} +"#, + expect![[r#" + 32..34 '15': u32 + 44..50 'Foo::A': Foo + 44..57 'Foo::A as u32': u32 + 44..61 'Foo::A...32 + 1': u32 + 60..61 '1': u32 + "#]], + ); +} + #[test] fn shadowing_primitive() { check_types( @@ -1917,6 +1952,88 @@ fn closure_return_inferred() { ); } +#[test] +fn generator_types_inferred() { + check_infer( + r#" +//- minicore: generator, deref +use core::ops::{Generator, GeneratorState}; +use core::pin::Pin; + +fn f(v: i64) {} +fn test() { + let mut g = |r| { + let a = yield 0; + let a = yield 1; + let a = yield 2; + "return value" + }; + + match Pin::new(&mut g).resume(0usize) { + GeneratorState::Yielded(y) => { 
f(y); } + GeneratorState::Complete(r) => {} + } +} + "#, + expect![[r#" + 70..71 'v': i64 + 78..80 '{}': () + 91..362 '{ ... } }': () + 101..106 'mut g': |usize| yields i64 -> &str + 109..218 '|r| { ... }': |usize| yields i64 -> &str + 110..111 'r': usize + 113..218 '{ ... }': &str + 127..128 'a': usize + 131..138 'yield 0': usize + 137..138 '0': i64 + 152..153 'a': usize + 156..163 'yield 1': usize + 162..163 '1': i64 + 177..178 'a': usize + 181..188 'yield 2': usize + 187..188 '2': i64 + 198..212 '"return value"': &str + 225..360 'match ... }': () + 231..239 'Pin::new': fn new<&mut |usize| yields i64 -> &str>(&mut |usize| yields i64 -> &str) -> Pin<&mut |usize| yields i64 -> &str> + 231..247 'Pin::n...mut g)': Pin<&mut |usize| yields i64 -> &str> + 231..262 'Pin::n...usize)': GeneratorState + 240..246 '&mut g': &mut |usize| yields i64 -> &str + 245..246 'g': |usize| yields i64 -> &str + 255..261 '0usize': usize + 273..299 'Genera...ded(y)': GeneratorState + 297..298 'y': i64 + 303..312 '{ f(y); }': () + 305..306 'f': fn f(i64) + 305..309 'f(y)': () + 307..308 'y': i64 + 321..348 'Genera...ete(r)': GeneratorState + 346..347 'r': &str + 352..354 '{}': () + "#]], + ); +} + +#[test] +fn generator_resume_yield_return_unit() { + check_no_mismatches( + r#" +//- minicore: generator, deref +use core::ops::{Generator, GeneratorState}; +use core::pin::Pin; +fn test() { + let mut g = || { + let () = yield; + }; + + match Pin::new(&mut g).resume(()) { + GeneratorState::Yielded(()) => {} + GeneratorState::Complete(()) => {} + } +} + "#, + ); +} + #[test] fn fn_pointer_return() { check_infer( diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 21a8631976..555b6972fb 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -279,6 +279,10 @@ fn test() { pub mod iter { pub trait IntoIterator { type Item; + type IntoIter: Iterator; + } + pub trait Iterator { + type Item; } } pub mod prelude { @@ -297,7 +301,13 @@ pub mod collections { } impl IntoIterator for Vec { - type Item=T; + type Item = T; + type IntoIter = IntoIter; + } + + struct IntoIter {} + impl Iterator for IntoIter { + type Item = T; } } "#, diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index d6638db028..e54bcb421a 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -4,7 +4,7 @@ use std::iter; use base_db::CrateId; -use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex}; +use chalk_ir::{cast::Cast, fold::Shift, BoundVar, DebruijnIndex}; use hir_def::{ db::DefDatabase, generics::{ @@ -24,8 +24,7 @@ use smallvec::{smallvec, SmallVec}; use syntax::SmolStr; use crate::{ - db::HirDatabase, ChalkTraitId, ConstData, ConstValue, GenericArgData, Interner, Substitution, - TraitRef, TraitRefExt, TyKind, WhereClause, + db::HirDatabase, ChalkTraitId, Interner, Substitution, TraitRef, TraitRefExt, WhereClause, }; pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: CrateId) -> impl Iterator { @@ -174,31 +173,6 @@ pub(super) fn associated_type_by_name_including_super_traits( pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics { let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def))); - if parent_generics.is_some() && matches!(def, GenericDefId::TypeAliasId(_)) { - let params = db.generic_params(def); - let parent_params = &parent_generics.as_ref().unwrap().params; - let has_consts = - params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_))); - let parent_has_consts 
= - parent_params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_))); - return if has_consts || parent_has_consts { - // XXX: treat const generic associated types as not existing to avoid crashes - // (#11769) - // - // Note: Also crashes when the parent has const generics (also even if the GAT - // doesn't use them), see `tests::regression::gat_crash_3` for an example. - // Avoids that by disabling GATs when the parent (i.e. `impl` block) has - // const generics (#12193). - // - // Chalk expects the inner associated type's parameters to come - // *before*, not after the trait's generics as we've always done it. - // Adapting to this requires a larger refactoring - cov_mark::hit!(ignore_gats); - Generics { def, params: Interned::new(Default::default()), parent_generics } - } else { - Generics { def, params, parent_generics } - }; - } Generics { def, params: db.generic_params(def), parent_generics } } @@ -221,23 +195,30 @@ impl Generics { }) } - /// Iterator over types and const params of parent, then self. + /// Iterator over types and const params of self, then parent. pub(crate) fn iter<'a>( &'a self, ) -> impl DoubleEndedIterator + 'a { let to_toc_id = |it: &'a Generics| { move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p) }; - self.parent_generics() - .into_iter() - .flat_map(move |it| it.params.iter().map(to_toc_id(it))) - .chain(self.params.iter().map(to_toc_id(self))) + self.params.iter().map(to_toc_id(self)).chain(self.iter_parent()) + } + + /// Iterate over types and const params without parent params. + pub(crate) fn iter_self<'a>( + &'a self, + ) -> impl DoubleEndedIterator + 'a { + let to_toc_id = |it: &'a Generics| { + move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p) + }; + self.params.iter().map(to_toc_id(self)) } /// Iterator over types and const params of parent. pub(crate) fn iter_parent<'a>( &'a self, - ) -> impl Iterator + 'a { + ) -> impl DoubleEndedIterator + 'a { self.parent_generics().into_iter().flat_map(|it| { let to_toc_id = move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p); @@ -245,12 +226,18 @@ impl Generics { }) } + /// Returns total number of generic parameters in scope, including those from parent. pub(crate) fn len(&self) -> usize { let parent = self.parent_generics().map_or(0, Generics::len); let child = self.params.type_or_consts.len(); parent + child } + /// Returns numbers of generic parameters excluding those from parent. + pub(crate) fn len_self(&self) -> usize { + self.params.type_or_consts.len() + } + /// (parent total, self param, type param list, const param list, impl trait) pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize) { let ty_iter = || self.params.iter().filter_map(|x| x.1.type_param()); @@ -275,15 +262,17 @@ impl Generics { if param.parent == self.def { let (idx, (_local_id, data)) = self.params.iter().enumerate().find(|(_, (idx, _))| *idx == param.local_id)?; - let parent_len = self.parent_generics().map_or(0, Generics::len); - Some((parent_len + idx, data)) + Some((idx, data)) } else { - self.parent_generics().and_then(|g| g.find_param(param)) + self.parent_generics() + .and_then(|g| g.find_param(param)) + // Remember that parent parameters come after parameters for self. 
+ .map(|(idx, data)| (self.len_self() + idx, data)) } } - fn parent_generics(&self) -> Option<&Generics> { - self.parent_generics.as_ref().map(|it| &**it) + pub(crate) fn parent_generics(&self) -> Option<&Generics> { + self.parent_generics.as_deref() } /// Returns a Substitution that replaces each parameter by a bound variable. @@ -295,18 +284,10 @@ impl Generics { Substitution::from_iter( Interner, self.iter_id().enumerate().map(|(idx, id)| match id { - Either::Left(_) => GenericArgData::Ty( - TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner), - ) - .intern(Interner), - Either::Right(id) => GenericArgData::Const( - ConstData { - value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)), - ty: db.const_param_ty(id), - } - .intern(Interner), - ) - .intern(Interner), + Either::Left(_) => BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner), + Either::Right(id) => BoundVar::new(debruijn, idx) + .to_const(Interner, db.const_param_ty(id)) + .cast(Interner), }), ) } @@ -316,18 +297,12 @@ impl Generics { Substitution::from_iter( Interner, self.iter_id().map(|id| match id { - Either::Left(id) => GenericArgData::Ty( - TyKind::Placeholder(crate::to_placeholder_idx(db, id.into())).intern(Interner), - ) - .intern(Interner), - Either::Right(id) => GenericArgData::Const( - ConstData { - value: ConstValue::Placeholder(crate::to_placeholder_idx(db, id.into())), - ty: db.const_param_ty(id), - } - .intern(Interner), - ) - .intern(Interner), + Either::Left(id) => { + crate::to_placeholder_idx(db, id.into()).to_ty(Interner).cast(Interner) + } + Either::Right(id) => crate::to_placeholder_idx(db, id.into()) + .to_const(Interner, db.const_param_ty(id)) + .cast(Interner), }), ) } diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 5edc16d8bc..c5dc60f1ec 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -1,7 +1,7 @@ //! Re-export diagnostics such that clients of `hir` don't have to depend on //! low-level crates. //! -//! This probably isn't the best way to do this -- ideally, diagnistics should +//! This probably isn't the best way to do this -- ideally, diagnostics should //! be expressed in terms of hir types themselves. 
use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 0e29c52ade..27b2f445d7 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -492,6 +492,9 @@ impl HirDisplay for TypeAlias { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; let data = f.db.type_alias_data(self.id); write!(f, "type {}", data.name)?; + let def_id = GenericDefId::TypeAliasId(self.id); + write_generic_params(def_id, f)?; + write_where_clause(def_id, f)?; if !data.bounds.is_empty() { f.write_str(": ")?; f.write_joined(&data.bounds, " + ")?; diff --git a/crates/hir/src/from_id.rs b/crates/hir/src/from_id.rs index 9c7558d191..f825a72c0f 100644 --- a/crates/hir/src/from_id.rs +++ b/crates/hir/src/from_id.rs @@ -140,6 +140,7 @@ impl From for DefWithBodyId { DefWithBody::Function(it) => DefWithBodyId::FunctionId(it.id), DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id), DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id), + DefWithBody::Variant(it) => DefWithBodyId::VariantId(it.into()), } } } @@ -150,6 +151,7 @@ impl From for DefWithBody { DefWithBodyId::FunctionId(it) => DefWithBody::Function(it.into()), DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()), DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()), + DefWithBodyId::VariantId(it) => DefWithBody::Variant(it.into()), } } } @@ -172,9 +174,7 @@ impl From for GenericDefId { GenericDef::Trait(it) => GenericDefId::TraitId(it.id), GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id), GenericDef::Impl(it) => GenericDefId::ImplId(it.id), - GenericDef::Variant(it) => { - GenericDefId::EnumVariantId(EnumVariantId { parent: it.parent.id, local_id: it.id }) - } + GenericDef::Variant(it) => GenericDefId::EnumVariantId(it.into()), GenericDef::Const(it) => GenericDefId::ConstId(it.id), } } @@ -188,9 +188,7 @@ impl From for GenericDef { GenericDefId::TraitId(it) => GenericDef::Trait(it.into()), GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()), GenericDefId::ImplId(it) => GenericDef::Impl(it.into()), - GenericDefId::EnumVariantId(it) => { - GenericDef::Variant(Variant { parent: it.parent.into(), id: it.local_id }) - } + GenericDefId::EnumVariantId(it) => GenericDef::Variant(it.into()), GenericDefId::ConstId(it) => GenericDef::Const(it.into()), } } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index d2717c5665..f5324208c9 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -39,7 +39,7 @@ use arrayvec::ArrayVec; use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind}; use either::Either; use hir_def::{ - adt::{ReprKind, VariantData}, + adt::{ReprData, VariantData}, body::{BodyDiagnostic, SyntheticSyntax}, expr::{BindingAnnotation, LabelId, Pat, PatId}, generics::{TypeOrConstParamData, TypeParamProvenance}, @@ -50,7 +50,7 @@ use hir_def::{ resolver::{HasResolver, Resolver}, src::HasSource as _, AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, - FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId, + EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, }; @@ -61,7 +61,6 @@ use hir_ty::{ diagnostics::BodyValidationDiagnostic, method_resolution::{self, TyFingerprint}, primitive::UintTy, - subst_prefix, 
traits::FnTrait, AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution, @@ -73,7 +72,7 @@ use once_cell::unsync::Lazy; use rustc_hash::FxHashSet; use stdx::{impl_from, never}; use syntax::{ - ast::{self, HasAttrs as _, HasDocComments, HasName}, + ast::{self, Expr, HasAttrs as _, HasDocComments, HasName}, AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T, }; @@ -348,7 +347,10 @@ impl ModuleDef { ModuleDef::Module(it) => it.id.into(), ModuleDef::Const(it) => it.id.into(), ModuleDef::Static(it) => it.id.into(), - _ => return Vec::new(), + ModuleDef::Variant(it) => { + EnumVariantId { parent: it.parent.into(), local_id: it.id }.into() + } + ModuleDef::BuiltinType(_) | ModuleDef::Macro(_) => return Vec::new(), }; let module = match self.module(db) { @@ -377,10 +379,10 @@ impl ModuleDef { ModuleDef::Function(it) => Some(it.into()), ModuleDef::Const(it) => Some(it.into()), ModuleDef::Static(it) => Some(it.into()), + ModuleDef::Variant(it) => Some(it.into()), ModuleDef::Module(_) | ModuleDef::Adt(_) - | ModuleDef::Variant(_) | ModuleDef::Trait(_) | ModuleDef::TypeAlias(_) | ModuleDef::Macro(_) @@ -537,6 +539,30 @@ impl Module { } acc.extend(decl.diagnostics(db)) } + ModuleDef::Adt(adt) => { + match adt { + Adt::Struct(s) => { + for diag in db.struct_data_with_diagnostics(s.id).1.iter() { + emit_def_diagnostic(db, acc, diag); + } + } + Adt::Union(u) => { + for diag in db.union_data_with_diagnostics(u.id).1.iter() { + emit_def_diagnostic(db, acc, diag); + } + } + Adt::Enum(e) => { + for v in e.variants(db) { + acc.extend(ModuleDef::Variant(v).diagnostics(db)); + } + + for diag in db.enum_data_with_diagnostics(e.id).1.iter() { + emit_def_diagnostic(db, acc, diag); + } + } + } + acc.extend(decl.diagnostics(db)) + } _ => acc.extend(decl.diagnostics(db)), } } @@ -874,7 +900,7 @@ impl Struct { Type::from_def(db, self.id) } - pub fn repr(self, db: &dyn HirDatabase) -> Option { + pub fn repr(self, db: &dyn HirDatabase) -> Option { db.struct_data(self.id).repr.clone() } @@ -952,6 +978,21 @@ impl Enum { pub fn ty(self, db: &dyn HirDatabase) -> Type { Type::from_def(db, self.id) } + + /// The type of the enum variant bodies. + pub fn variant_body_ty(self, db: &dyn HirDatabase) -> Type { + Type::new_for_crate( + self.id.lookup(db.upcast()).container.krate(), + TyBuilder::builtin(match db.enum_data(self.id).variant_body_type() { + Either::Left(builtin) => hir_def::builtin_type::BuiltinType::Int(builtin), + Either::Right(builtin) => hir_def::builtin_type::BuiltinType::Uint(builtin), + }), + ) + } + + pub fn is_data_carrying(self, db: &dyn HirDatabase) -> bool { + self.variants(db).iter().any(|v| !matches!(v.kind(db), StructKind::Unit)) + } } impl HasVisibility for Enum { @@ -960,6 +1001,12 @@ impl HasVisibility for Enum { } } +impl From<&Variant> for DefWithBodyId { + fn from(&v: &Variant) -> Self { + DefWithBodyId::VariantId(v.into()) + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Variant { pub(crate) parent: Enum, @@ -994,6 +1041,14 @@ impl Variant { pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc { db.enum_data(self.parent.id).variants[self.id].variant_data.clone() } + + pub fn value(self, db: &dyn HirDatabase) -> Option { + self.source(db)?.value.expr() + } + + pub fn eval(self, db: &dyn HirDatabase) -> Result { + db.const_eval_variant(self.into()) + } } /// Variants inherit visibility from the parent enum. 
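The new `DefWithBody::Variant` support above lets enum variant discriminant expressions be type-checked and const-evaluated like any other body. A minimal sketch of how a caller might use the APIs added in this patch (`Variant::eval`, `Variant::value`, `Variant::parent_enum`, `Enum::is_data_carrying`); the helper name and exact formatting are illustrative only, not part of the patch:

    use hir::{db::HirDatabase, Variant};

    // Illustrative helper: render "Name = value" for a variant of a field-less enum,
    // falling back to the source expression when const-eval fails (the hover change
    // later in this patch does essentially the same thing).
    fn variant_label(db: &dyn HirDatabase, variant: Variant) -> Option<String> {
        if variant.parent_enum(db).is_data_carrying(db) {
            // data-carrying enums get no computed discriminant here
            return None;
        }
        let value = match variant.eval(db) {
            Ok(val) => format!("{}", val),
            Err(_) => format!("{:?}", variant.value(db)?),
        };
        Some(format!("{} = {}", variant.name(db), value))
    }
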
@@ -1034,7 +1089,7 @@ impl Adt { pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type { let id = AdtId::from(self); let mut it = args.iter().map(|t| t.ty.clone()); - let ty = TyBuilder::def_ty(db, id.into()) + let ty = TyBuilder::def_ty(db, id.into(), None) .fill(|x| { let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)); match x { @@ -1129,8 +1184,9 @@ pub enum DefWithBody { Function(Function), Static(Static), Const(Const), + Variant(Variant), } -impl_from!(Function, Const, Static for DefWithBody); +impl_from!(Function, Const, Static, Variant for DefWithBody); impl DefWithBody { pub fn module(self, db: &dyn HirDatabase) -> Module { @@ -1138,6 +1194,7 @@ impl DefWithBody { DefWithBody::Const(c) => c.module(db), DefWithBody::Function(f) => f.module(db), DefWithBody::Static(s) => s.module(db), + DefWithBody::Variant(v) => v.module(db), } } @@ -1146,6 +1203,7 @@ impl DefWithBody { DefWithBody::Function(f) => Some(f.name(db)), DefWithBody::Static(s) => Some(s.name(db)), DefWithBody::Const(c) => c.name(db), + DefWithBody::Variant(v) => Some(v.name(db)), } } @@ -1155,6 +1213,7 @@ impl DefWithBody { DefWithBody::Function(it) => it.ret_type(db), DefWithBody::Static(it) => it.ty(db), DefWithBody::Const(it) => it.ty(db), + DefWithBody::Variant(it) => it.parent.variant_body_ty(db), } } @@ -1163,6 +1222,7 @@ impl DefWithBody { DefWithBody::Function(it) => it.id.into(), DefWithBody::Static(it) => it.id.into(), DefWithBody::Const(it) => it.id.into(), + DefWithBody::Variant(it) => it.into(), } } @@ -1379,6 +1439,7 @@ impl DefWithBody { DefWithBody::Function(it) => it.into(), DefWithBody::Static(it) => it.into(), DefWithBody::Const(it) => it.into(), + DefWithBody::Variant(it) => it.into(), }; for diag in hir_ty::diagnostics::incorrect_case(db, krate, def.into()) { acc.push(diag.into()) @@ -2485,7 +2546,7 @@ impl TypeParam { let resolver = self.id.parent().resolver(db.upcast()); let ty = params.get(local_idx)?.clone(); let subst = TyBuilder::placeholder_subst(db, self.id.parent()); - let ty = ty.substitute(Interner, &subst_prefix(&subst, local_idx)); + let ty = ty.substitute(Interner, &subst); match ty.data(Interner) { GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())), _ => None, @@ -2739,7 +2800,22 @@ impl Type { } fn from_def(db: &dyn HirDatabase, def: impl HasResolver + Into) -> Type { - let ty = TyBuilder::def_ty(db, def.into()).fill_with_unknown().build(); + let ty_def = def.into(); + let parent_subst = match ty_def { + TyDefId::TypeAliasId(id) => match id.lookup(db.upcast()).container { + ItemContainerId::TraitId(id) => { + let subst = TyBuilder::subst_for_def(db, id, None).fill_with_unknown().build(); + Some(subst) + } + ItemContainerId::ImplId(id) => { + let subst = TyBuilder::subst_for_def(db, id, None).fill_with_unknown().build(); + Some(subst) + } + _ => None, + }, + _ => None, + }; + let ty = TyBuilder::def_ty(db, ty_def, parent_subst).fill_with_unknown().build(); Type::new(db, def, ty) } @@ -2879,7 +2955,11 @@ impl Type { alias: TypeAlias, ) -> Option { let mut args = args.iter(); - let projection = TyBuilder::assoc_type_projection(db, alias.id) + let trait_id = match alias.id.lookup(db.upcast()).container { + ItemContainerId::TraitId(id) => id, + _ => unreachable!("non assoc type alias reached in normalize_trait_assoc_type()"), + }; + let parent_subst = TyBuilder::subst_for_def(db, trait_id, None) .push(self.ty.clone()) .fill(|x| { // FIXME: this code is not covered in tests. 
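Throughout this patch, `TyBuilder::subst_for_def` (and likewise `TyBuilder::def_ty` and `TyBuilder::assoc_type_projection`) now take the parent's substitution explicitly instead of splicing it in afterwards with `use_parent_substs`. A minimal sketch of the calling convention, using only builder methods that appear in this diff; `impl_id`, `fn_id` and `table` stand in for whatever the caller already has at hand:

    // Build the parent (impl/trait) substitution first, then pass it when building
    // the associated item's substitution; the item's own parameters now come first
    // and the parent's follow.
    let impl_subst = TyBuilder::subst_for_def(db, impl_id, None)
        .fill_with_inference_vars(table)
        .build();
    let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone()))
        .fill_with_inference_vars(table)
        .build();
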
@@ -2891,6 +2971,8 @@ impl Type { } }) .build(); + // FIXME: We don't handle GATs yet. + let projection = TyBuilder::assoc_type_projection(db, alias.id, Some(parent_subst)).build(); let ty = db.normalize_projection(projection, self.env.clone()); if ty.is_unknown() { @@ -2940,7 +3022,7 @@ impl Type { let adt = adt_id.into(); match adt { - Adt::Struct(s) => matches!(s.repr(db), Some(ReprKind::Packed)), + Adt::Struct(s) => matches!(s.repr(db), Some(ReprData { packed: true, .. })), _ => false, } } diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 416b6f5806..119ec3210e 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -257,6 +257,11 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { pub fn original_ast_node(&self, node: N) -> Option { self.imp.original_ast_node(node) } + /// Attempts to map the node out of macro expanded files. + /// This only work for attribute expansions, as other ones do not have nodes as input. + pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option { + self.imp.original_syntax_node(node) + } pub fn diagnostics_display_range(&self, diagnostics: InFile) -> FileRange { self.imp.diagnostics_display_range(diagnostics) @@ -956,6 +961,16 @@ impl<'db> SemanticsImpl<'db> { ) } + fn original_syntax_node(&self, node: &SyntaxNode) -> Option { + let InFile { file_id, .. } = self.find_file(node); + InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map( + |InFile { file_id, value }| { + self.cache(find_root(&value), file_id); + value + }, + ) + } + fn diagnostics_display_range(&self, src: InFile) -> FileRange { let root = self.parse_or_expand(src.file_id).unwrap(); let node = src.map(|it| it.to_node(&root)); diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs index ba9a1cfb6b..fa45e3c12e 100644 --- a/crates/hir/src/semantics/source_to_def.rs +++ b/crates/hir/src/semantics/source_to_def.rs @@ -115,7 +115,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> { } impl SourceToDefCtx<'_, '_> { - pub(super) fn file_to_def(&mut self, file: FileId) -> SmallVec<[ModuleId; 1]> { + pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> { let _p = profile::span("SourceBinder::to_module_def"); let mut mods = SmallVec::new(); for &crate_id in self.db.relevant_crates(file).iter() { @@ -130,7 +130,7 @@ impl SourceToDefCtx<'_, '_> { mods } - pub(super) fn module_to_def(&mut self, src: InFile) -> Option { + pub(super) fn module_to_def(&self, src: InFile) -> Option { let _p = profile::span("module_to_def"); let parent_declaration = src .syntax() @@ -151,7 +151,7 @@ impl SourceToDefCtx<'_, '_> { Some(def_map.module_id(child_id)) } - pub(super) fn source_file_to_def(&mut self, src: InFile) -> Option { + pub(super) fn source_file_to_def(&self, src: InFile) -> Option { let _p = profile::span("source_file_to_def"); let file_id = src.file_id.original_file(self.db.upcast()); self.file_to_def(file_id).get(0).copied() @@ -384,7 +384,7 @@ impl SourceToDefCtx<'_, '_> { } else { let it = ast::Variant::cast(container.value)?; let def = self.enum_variant_to_def(InFile::new(container.file_id, it))?; - VariantId::from(def).into() + DefWithBodyId::from(def).into() }; Some(cont) } diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 342912b678..07bae2b38c 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -22,7 +22,7 @@ use hir_def::{ resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, 
type_ref::Mutability, AsMacroCall, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, LocalFieldId, - Lookup, ModuleDefId, VariantId, + Lookup, ModuleDefId, TraitId, VariantId, }; use hir_expand::{ builtin_fn_macro::BuiltinFnLikeExpander, @@ -302,10 +302,15 @@ impl SourceAnalyzer { } } + let future_trait = db + .lang_item(self.resolver.krate(), hir_expand::name![future_trait].to_smol_str())? + .as_trait()?; let poll_fn = db .lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())? .as_function()?; - let substs = hir_ty::TyBuilder::subst_for_def(db, poll_fn).push(ty.clone()).build(); + // HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself + // doesn't have any generic parameters, so we skip building another subst for `poll()`. + let substs = hir_ty::TyBuilder::subst_for_def(db, future_trait, None).push(ty).build(); Some(self.resolve_impl_method_or_trait_def(db, poll_fn, &substs)) } @@ -321,8 +326,10 @@ impl SourceAnalyzer { }; let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?; - let op_fn = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?; - let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build(); + let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?; + // HACK: subst for all methods coincides with that for their trait because the methods + // don't have any generic parameters, so we skip building another subst for the methods. + let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build(); Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) } @@ -337,8 +344,10 @@ impl SourceAnalyzer { let lang_item_name = name![index]; - let op_fn = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?; - let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn) + let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?; + // HACK: subst for all methods coincides with that for their trait because the methods + // don't have any generic parameters, so we skip building another subst for the methods. + let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None) .push(base_ty.clone()) .push(index_ty.clone()) .build(); @@ -354,10 +363,14 @@ impl SourceAnalyzer { let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?; let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?; - let op_fn = lang_names_for_bin_op(op) + let (op_trait, op_fn) = lang_names_for_bin_op(op) .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?; - let substs = - hir_ty::TyBuilder::subst_for_def(db, op_fn).push(lhs.clone()).push(rhs.clone()).build(); + // HACK: subst for `index()` coincides with that for `Index` because `index()` itself + // doesn't have any generic parameters, so we skip building another subst for `index()`. 
+ let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None) + .push(lhs.clone()) + .push(rhs.clone()) + .build(); Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) } @@ -371,7 +384,13 @@ impl SourceAnalyzer { let op_fn = db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?; - let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build(); + let op_trait = match op_fn.lookup(db.upcast()).container { + ItemContainerId::TraitId(id) => id, + _ => return None, + }; + // HACK: subst for `branch()` coincides with that for `Try` because `branch()` itself + // doesn't have any generic parameters, so we skip building another subst for `branch()`. + let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build(); Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) } @@ -799,9 +818,10 @@ impl SourceAnalyzer { db: &dyn HirDatabase, lang_trait: &Name, method_name: &Name, - ) -> Option { - db.trait_data(db.lang_item(self.resolver.krate(), lang_trait.to_smol_str())?.as_trait()?) - .method_by_name(method_name) + ) -> Option<(TraitId, FunctionId)> { + let trait_id = db.lang_item(self.resolver.krate(), lang_trait.to_smol_str())?.as_trait()?; + let fn_id = db.trait_data(trait_id).method_by_name(method_name)?; + Some((trait_id, fn_id)) } fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> { diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index 616a406c72..fd78decda4 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs @@ -244,6 +244,10 @@ impl<'a> SymbolCollector<'a> { DefWithBodyId::ConstId(id) => Some( id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(), ), + DefWithBodyId::VariantId(id) => Some({ + let db = self.db.upcast(); + id.parent.lookup(db).source(db).value.name()?.text().into() + }), } } diff --git a/crates/ide-assists/src/handlers/move_format_string_arg.rs b/crates/ide-assists/src/handlers/move_format_string_arg.rs index 92b2fa79d7..aa710d2ce6 100644 --- a/crates/ide-assists/src/handlers/move_format_string_arg.rs +++ b/crates/ide-assists/src/handlers/move_format_string_arg.rs @@ -7,6 +7,7 @@ use ide_db::{ }, }; use itertools::Itertools; +use stdx::format_to; use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; // Assist: move_format_string_arg @@ -78,20 +79,26 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) // Extract existing arguments in macro let tokens = - tt.token_trees_and_tokens().filter_map(NodeOrToken::into_token).collect_vec(); + tt.token_trees_and_tokens().collect_vec(); let mut existing_args: Vec = vec![]; let mut current_arg = String::new(); - if let [_opening_bracket, format_string, _args_start_comma, tokens @ .., end_bracket] = + if let [_opening_bracket, NodeOrToken::Token(format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(end_bracket)] = tokens.as_slice() { for t in tokens { - if t.kind() == COMMA { - existing_args.push(current_arg.trim().into()); - current_arg.clear(); - } else { - current_arg.push_str(t.text()); + match t { + NodeOrToken::Node(n) => { + format_to!(current_arg, "{n}"); + }, + NodeOrToken::Token(t) if t.kind() == COMMA=> { + existing_args.push(current_arg.trim().into()); + current_arg.clear(); + }, + NodeOrToken::Token(t) => { + current_arg.push_str(t.text()); + }, } } existing_args.push(current_arg.trim().into()); @@ -261,6 +268,27 @@ fn main() { fn main() { print!("{} {:b} {}"$0, 1, x + 1, 
Struct(1, 2)); } +"#, + ), + ); + } + + #[test] + fn nested_tt() { + check_assist( + move_format_string_arg, + &add_macro_decl( + r#" +fn main() { + print!("My name is {} {x$0 + x}", stringify!(Paperino)) +} +"#, + ), + &add_macro_decl( + r#" +fn main() { + print!("My name is {} {}"$0, stringify!(Paperino), x + x) +} "#, ), ); diff --git a/crates/ide-assists/src/handlers/unwrap_tuple.rs b/crates/ide-assists/src/handlers/unwrap_tuple.rs new file mode 100644 index 0000000000..25c58d086e --- /dev/null +++ b/crates/ide-assists/src/handlers/unwrap_tuple.rs @@ -0,0 +1,159 @@ +use syntax::{ + ast::{self, edit::AstNodeEdit}, + AstNode, T, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: unwrap_tuple +// +// Unwrap the tuple to different variables. +// +// ``` +// # //- minicore: result +// fn main() { +// $0let (foo, bar) = ("Foo", "Bar"); +// } +// ``` +// -> +// ``` +// fn main() { +// let foo = "Foo"; +// let bar = "Bar"; +// } +// ``` +pub(crate) fn unwrap_tuple(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let let_kw = ctx.find_token_syntax_at_offset(T![let])?; + let let_stmt = let_kw.parent().and_then(ast::LetStmt::cast)?; + let indent_level = let_stmt.indent_level().0 as usize; + let pat = let_stmt.pat()?; + let ty = let_stmt.ty(); + let init = let_stmt.initializer()?; + + // This only applies for tuple patterns, types, and initializers. + let tuple_pat = match pat { + ast::Pat::TuplePat(pat) => pat, + _ => return None, + }; + let tuple_ty = ty.and_then(|it| match it { + ast::Type::TupleType(ty) => Some(ty), + _ => None, + }); + let tuple_init = match init { + ast::Expr::TupleExpr(expr) => expr, + _ => return None, + }; + + if tuple_pat.fields().count() != tuple_init.fields().count() { + return None; + } + if let Some(tys) = &tuple_ty { + if tuple_pat.fields().count() != tys.fields().count() { + return None; + } + } + + let parent = let_kw.parent()?; + + acc.add( + AssistId("unwrap_tuple", AssistKind::RefactorRewrite), + "Unwrap tuple", + let_kw.text_range(), + |edit| { + let indents = " ".repeat(indent_level); + + // If there is an ascribed type, insert that type for each declaration, + // otherwise, omit that type. 
+ if let Some(tys) = tuple_ty { + let mut zipped_decls = String::new(); + for (pat, ty, expr) in + itertools::izip!(tuple_pat.fields(), tys.fields(), tuple_init.fields()) + { + zipped_decls.push_str(&format!("{}let {pat}: {ty} = {expr};\n", indents)) + } + edit.replace(parent.text_range(), zipped_decls.trim()); + } else { + let mut zipped_decls = String::new(); + for (pat, expr) in itertools::izip!(tuple_pat.fields(), tuple_init.fields()) { + zipped_decls.push_str(&format!("{}let {pat} = {expr};\n", indents)); + } + edit.replace(parent.text_range(), zipped_decls.trim()); + } + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_assist; + + use super::*; + + #[test] + fn unwrap_tuples() { + check_assist( + unwrap_tuple, + r#" +fn main() { + $0let (foo, bar) = ("Foo", "Bar"); +} +"#, + r#" +fn main() { + let foo = "Foo"; + let bar = "Bar"; +} +"#, + ); + + check_assist( + unwrap_tuple, + r#" +fn main() { + $0let (foo, bar, baz) = ("Foo", "Bar", "Baz"); +} +"#, + r#" +fn main() { + let foo = "Foo"; + let bar = "Bar"; + let baz = "Baz"; +} +"#, + ); + } + + #[test] + fn unwrap_tuple_with_types() { + check_assist( + unwrap_tuple, + r#" +fn main() { + $0let (foo, bar): (u8, i32) = (5, 10); +} +"#, + r#" +fn main() { + let foo: u8 = 5; + let bar: i32 = 10; +} +"#, + ); + + check_assist( + unwrap_tuple, + r#" +fn main() { + $0let (foo, bar, baz): (u8, i32, f64) = (5, 10, 17.5); +} +"#, + r#" +fn main() { + let foo: u8 = 5; + let bar: i32 = 10; + let baz: f64 = 17.5; +} +"#, + ); + } +} diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 812d22efbd..82bcc3dfa5 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -189,6 +189,7 @@ mod handlers { mod replace_turbofish_with_explicit_type; mod split_import; mod unmerge_match_arm; + mod unwrap_tuple; mod sort_items; mod toggle_ignore; mod unmerge_use; @@ -291,6 +292,7 @@ mod handlers { unnecessary_async::unnecessary_async, unwrap_block::unwrap_block, unwrap_result_return_type::unwrap_result_return_type, + unwrap_tuple::unwrap_tuple, wrap_return_type_in_result::wrap_return_type_in_result, // These are manually sorted for better priorities. 
By default, // priority is determined by the size of the target range (smaller diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs index 258144bae3..a3bb66e379 100644 --- a/crates/ide-assists/src/tests.rs +++ b/crates/ide-assists/src/tests.rs @@ -96,8 +96,10 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) { }); let actual = { - let source_change = - assist.source_change.expect("Assist did not contain any source changes"); + let source_change = assist + .source_change + .filter(|it| !it.source_file_edits.is_empty() || !it.file_system_edits.is_empty()) + .expect("Assist did not contain any source changes"); let mut actual = before; if let Some(source_file_edit) = source_change.get_source_edit(file_id) { source_file_edit.apply(&mut actual); @@ -140,8 +142,10 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_la match (assist, expected) { (Some(assist), ExpectedResult::After(after)) => { - let source_change = - assist.source_change.expect("Assist did not contain any source changes"); + let source_change = assist + .source_change + .filter(|it| !it.source_file_edits.is_empty() || !it.file_system_edits.is_empty()) + .expect("Assist did not contain any source changes"); let skip_header = source_change.source_file_edits.len() == 1 && source_change.file_system_edits.len() == 0; diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 3a696635af..d403f86c6d 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -2386,6 +2386,25 @@ fn foo() -> i32 { 42i32 } ) } +#[test] +fn doctest_unwrap_tuple() { + check_doc_test( + "unwrap_tuple", + r#####" +//- minicore: result +fn main() { + $0let (foo, bar) = ("Foo", "Bar"); +} +"#####, + r#####" +fn main() { + let foo = "Foo"; + let bar = "Bar"; +} +"#####, + ) +} + #[test] fn doctest_wrap_return_type_in_result() { check_doc_test( diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index 785db6fde1..e82cbfdcb8 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -38,7 +38,7 @@ use ide_db::{ }; use syntax::{ ast::{self, edit_in_place::AttrsOwnerEdit}, - AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, T, + AstNode, SyntaxElement, SyntaxKind, TextRange, T, }; use text_edit::TextEdit; @@ -85,20 +85,36 @@ fn complete_trait_impl_name( name: &Option, kind: ImplCompletionKind, ) -> Option<()> { - let token = ctx.token.clone(); let item = match name { Some(name) => name.syntax().parent(), - None => if token.kind() == SyntaxKind::WHITESPACE { token.prev_token()? 
} else { token } - .parent(), + None => { + let token = &ctx.token; + match token.kind() { + SyntaxKind::WHITESPACE => token.prev_token()?, + _ => token.clone(), + } + .parent() + } }?; - complete_trait_impl( - acc, - ctx, - kind, - replacement_range(ctx, &item), - // item -> ASSOC_ITEM_LIST -> IMPL - &ast::Impl::cast(item.parent()?.parent()?)?, - ); + let item = ctx.sema.original_syntax_node(&item)?; + // item -> ASSOC_ITEM_LIST -> IMPL + let impl_def = ast::Impl::cast(item.parent()?.parent()?)?; + let replacement_range = { + // ctx.sema.original_ast_node(item)?; + let first_child = item + .children_with_tokens() + .find(|child| { + !matches!( + child.kind(), + SyntaxKind::COMMENT | SyntaxKind::WHITESPACE | SyntaxKind::ATTR + ) + }) + .unwrap_or_else(|| SyntaxElement::Node(item.clone())); + + TextRange::new(first_child.text_range().start(), ctx.source_range().end()) + }; + + complete_trait_impl(acc, ctx, kind, replacement_range, &impl_def); Some(()) } @@ -341,17 +357,6 @@ fn function_declaration(node: &ast::Fn, needs_whitespace: bool) -> String { syntax.trim_end().to_owned() } -fn replacement_range(ctx: &CompletionContext<'_>, item: &SyntaxNode) -> TextRange { - let first_child = item - .children_with_tokens() - .find(|child| { - !matches!(child.kind(), SyntaxKind::COMMENT | SyntaxKind::WHITESPACE | SyntaxKind::ATTR) - }) - .unwrap_or_else(|| SyntaxElement::Node(item.clone())); - - TextRange::new(first_child.text_range().start(), ctx.source_range().end()) -} - #[cfg(test)] mod tests { use expect_test::{expect, Expect}; diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 7cabdb55e8..82b85f2fa5 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -239,6 +239,7 @@ impl Definition { DefWithBody::Function(f) => f.source(db).map(|src| src.syntax().cloned()), DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()), DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()), + DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()), }; return match def { Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)), diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs index 04918891b2..f558b7256a 100644 --- a/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -137,6 +137,37 @@ trait Bar { #[cfg_attr(not(never), inline, cfg(no))] fn h() {} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled +"#, + ); + } + + #[test] + fn inactive_fields_and_variants() { + check( + r#" +enum Foo { + #[cfg(a)] Bar, +//^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled + Baz { + #[cfg(a)] baz: String, + //^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled + }, + Qux(#[cfg(a)] String), + //^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled +} + +struct Baz { + #[cfg(a)] baz: String, +//^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled +} + +struct Qux(#[cfg(a)] String); + //^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled + +union FooBar { + #[cfg(a)] baz: u32, +//^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled +} "#, ); } diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs index bfbe0db6e4..f994c284c7 100644 
--- a/crates/ide/src/annotations.rs +++ b/crates/ide/src/annotations.rs @@ -8,13 +8,15 @@ use ide_db::{ use syntax::{ast::HasName, AstNode, TextRange}; use crate::{ - fn_references::find_all_methods, + annotations::fn_references::find_all_methods, goto_implementation::goto_implementation, references::find_all_refs, runnables::{runnables, Runnable}, NavigationTarget, RunnableKind, }; +mod fn_references; + // Feature: Annotations // // Provides user with annotations above items for looking up references or impl blocks @@ -30,8 +32,8 @@ pub struct Annotation { #[derive(Debug)] pub enum AnnotationKind { Runnable(Runnable), - HasImpls { file_id: FileId, data: Option> }, - HasReferences { file_id: FileId, data: Option> }, + HasImpls { pos: FilePosition, data: Option> }, + HasReferences { pos: FilePosition, data: Option> }, } pub struct AnnotationConfig { @@ -68,13 +70,23 @@ pub(crate) fn annotations( } } + let mk_ranges = |(range, focus): (_, Option<_>)| { + let cmd_target: TextRange = focus.unwrap_or(range); + let annotation_range = match config.location { + AnnotationLocation::AboveName => cmd_target, + AnnotationLocation::AboveWholeItem => range, + }; + let target_pos = FilePosition { file_id, offset: cmd_target.start() }; + (annotation_range, target_pos) + }; + visit_file_defs(&Semantics::new(db), file_id, &mut |def| { let range = match def { Definition::Const(konst) if config.annotate_references => { - konst.source(db).and_then(|node| name_range(db, config, node, file_id)) + konst.source(db).and_then(|node| name_range(db, node, file_id)) } Definition::Trait(trait_) if config.annotate_references || config.annotate_impls => { - trait_.source(db).and_then(|node| name_range(db, config, node, file_id)) + trait_.source(db).and_then(|node| name_range(db, node, file_id)) } Definition::Adt(adt) => match adt { hir::Adt::Enum(enum_) => { @@ -83,27 +95,29 @@ pub(crate) fn annotations( .variants(db) .into_iter() .map(|variant| { - variant - .source(db) - .and_then(|node| name_range(db, config, node, file_id)) + variant.source(db).and_then(|node| name_range(db, node, file_id)) }) .flatten() .for_each(|range| { + let (annotation_range, target_position) = mk_ranges(range); annotations.push(Annotation { - range, - kind: AnnotationKind::HasReferences { file_id, data: None }, + range: annotation_range, + kind: AnnotationKind::HasReferences { + pos: target_position, + data: None, + }, }) }) } if config.annotate_references || config.annotate_impls { - enum_.source(db).and_then(|node| name_range(db, config, node, file_id)) + enum_.source(db).and_then(|node| name_range(db, node, file_id)) } else { None } } _ => { if config.annotate_references || config.annotate_impls { - adt.source(db).and_then(|node| name_range(db, config, node, file_id)) + adt.source(db).and_then(|node| name_range(db, node, file_id)) } else { None } @@ -116,33 +130,32 @@ pub(crate) fn annotations( Some(range) => range, None => return, }; - + let (annotation_range, target_pos) = mk_ranges(range); if config.annotate_impls && !matches!(def, Definition::Const(_)) { - annotations - .push(Annotation { range, kind: AnnotationKind::HasImpls { file_id, data: None } }); + annotations.push(Annotation { + range: annotation_range, + kind: AnnotationKind::HasImpls { pos: target_pos, data: None }, + }); } if config.annotate_references { annotations.push(Annotation { - range, - kind: AnnotationKind::HasReferences { file_id, data: None }, + range: annotation_range, + kind: AnnotationKind::HasReferences { pos: target_pos, data: None }, }); } fn name_range( db: 
&RootDatabase, - config: &AnnotationConfig, node: InFile, source_file_id: FileId, - ) -> Option { + ) -> Option<(TextRange, Option)> { if let Some(InFile { file_id, value }) = node.original_ast_node(db) { if file_id == source_file_id.into() { - return match config.location { - AnnotationLocation::AboveName => { - value.name().map(|name| name.syntax().text_range()) - } - AnnotationLocation::AboveWholeItem => Some(value.syntax().text_range()), - }; + return Some(( + value.syntax().text_range(), + value.name().map(|name| name.syntax().text_range()), + )); } } None @@ -150,12 +163,13 @@ pub(crate) fn annotations( }); if config.annotate_method_references { - annotations.extend(find_all_methods(db, file_id).into_iter().map( - |FileRange { file_id, range }| Annotation { - range, - kind: AnnotationKind::HasReferences { file_id, data: None }, - }, - )); + annotations.extend(find_all_methods(db, file_id).into_iter().map(|range| { + let (annotation_range, target_range) = mk_ranges(range); + Annotation { + range: annotation_range, + kind: AnnotationKind::HasReferences { pos: target_range, data: None }, + } + })); } annotations @@ -163,18 +177,11 @@ pub(crate) fn annotations( pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation { match annotation.kind { - AnnotationKind::HasImpls { file_id, ref mut data } => { - *data = - goto_implementation(db, FilePosition { file_id, offset: annotation.range.start() }) - .map(|range| range.info); + AnnotationKind::HasImpls { pos, ref mut data } => { + *data = goto_implementation(db, pos).map(|range| range.info); } - AnnotationKind::HasReferences { file_id, ref mut data } => { - *data = find_all_refs( - &Semantics::new(db), - FilePosition { file_id, offset: annotation.range.start() }, - None, - ) - .map(|result| { + AnnotationKind::HasReferences { pos, ref mut data } => { + *data = find_all_refs(&Semantics::new(db), pos, None).map(|result| { result .into_iter() .flat_map(|res| res.references) @@ -268,9 +275,12 @@ fn main() { Annotation { range: 6..10, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 6, + }, data: Some( [ FileRange { @@ -286,9 +296,12 @@ fn main() { Annotation { range: 30..36, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 30, + }, data: Some( [], ), @@ -297,9 +310,12 @@ fn main() { Annotation { range: 53..57, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 53, + }, data: Some( [], ), @@ -344,9 +360,12 @@ fn main() { Annotation { range: 7..11, kind: HasImpls { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 7, + }, data: Some( [], ), @@ -355,9 +374,12 @@ fn main() { Annotation { range: 7..11, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 7, + }, data: Some( [ FileRange { @@ -373,9 +395,12 @@ fn main() { Annotation { range: 17..21, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 17, + }, data: Some( [], ), @@ -424,9 +449,12 @@ fn main() { Annotation { range: 7..11, kind: HasImpls { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 7, + }, data: Some( [ NavigationTarget { @@ -445,9 +473,12 @@ fn main() { Annotation { range: 7..11, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: 
FileId( + 0, + ), + offset: 7, + }, data: Some( [ FileRange { @@ -469,9 +500,12 @@ fn main() { Annotation { range: 20..31, kind: HasImpls { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 20, + }, data: Some( [ NavigationTarget { @@ -490,9 +524,12 @@ fn main() { Annotation { range: 20..31, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 20, + }, data: Some( [ FileRange { @@ -508,9 +545,12 @@ fn main() { Annotation { range: 69..73, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 69, + }, data: Some( [], ), @@ -551,9 +591,12 @@ fn main() {} Annotation { range: 3..7, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 3, + }, data: Some( [], ), @@ -602,9 +645,12 @@ fn main() { Annotation { range: 7..11, kind: HasImpls { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 7, + }, data: Some( [ NavigationTarget { @@ -623,9 +669,12 @@ fn main() { Annotation { range: 7..11, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 7, + }, data: Some( [ FileRange { @@ -647,9 +696,12 @@ fn main() { Annotation { range: 33..44, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 33, + }, data: Some( [ FileRange { @@ -665,9 +717,12 @@ fn main() { Annotation { range: 61..65, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 61, + }, data: Some( [], ), @@ -761,9 +816,12 @@ mod tests { Annotation { range: 3..7, kind: HasReferences { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 3, + }, data: Some( [], ), @@ -821,9 +879,12 @@ struct Foo; Annotation { range: 0..71, kind: HasImpls { - file_id: FileId( - 0, - ), + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 67, + }, data: Some( [], ), @@ -832,10 +893,15 @@ struct Foo; Annotation { range: 0..71, kind: HasReferences { - file_id: FileId( - 0, + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 67, + }, + data: Some( + [], ), - data: None, }, }, ] diff --git a/crates/ide/src/fn_references.rs b/crates/ide/src/annotations/fn_references.rs similarity index 61% rename from crates/ide/src/fn_references.rs rename to crates/ide/src/annotations/fn_references.rs index 63fb322cea..0cadf125fe 100644 --- a/crates/ide/src/fn_references.rs +++ b/crates/ide/src/annotations/fn_references.rs @@ -4,30 +4,38 @@ use hir::Semantics; use ide_assists::utils::test_related_attribute; use ide_db::RootDatabase; -use syntax::{ast, ast::HasName, AstNode, SyntaxNode}; +use syntax::{ast, ast::HasName, AstNode, SyntaxNode, TextRange}; -use crate::{FileId, FileRange}; +use crate::FileId; -pub(crate) fn find_all_methods(db: &RootDatabase, file_id: FileId) -> Vec { +pub(super) fn find_all_methods( + db: &RootDatabase, + file_id: FileId, +) -> Vec<(TextRange, Option)> { let sema = Semantics::new(db); let source_file = sema.parse(file_id); - source_file.syntax().descendants().filter_map(|it| method_range(it, file_id)).collect() + source_file.syntax().descendants().filter_map(|it| method_range(it)).collect() } -fn method_range(item: SyntaxNode, file_id: FileId) -> Option { +fn method_range(item: SyntaxNode) -> Option<(TextRange, Option)> { ast::Fn::cast(item).and_then(|fn_def| { if 
test_related_attribute(&fn_def).is_some() { None } else { - fn_def.name().map(|name| FileRange { file_id, range: name.syntax().text_range() }) + Some(( + fn_def.syntax().text_range(), + fn_def.name().map(|name| name.syntax().text_range()), + )) } }) } #[cfg(test)] mod tests { + use syntax::TextRange; + use crate::fixture; - use crate::{FileRange, TextSize}; + use crate::TextSize; use std::ops::RangeInclusive; #[test] @@ -42,7 +50,7 @@ mod tests { "#, ); - let refs = analysis.find_all_methods(pos.file_id).unwrap(); + let refs = super::find_all_methods(&analysis.db, pos.file_id); check_result(&refs, &[3..=13, 27..=33, 47..=57]); } @@ -57,7 +65,7 @@ mod tests { "#, ); - let refs = analysis.find_all_methods(pos.file_id).unwrap(); + let refs = super::find_all_methods(&analysis.db, pos.file_id); check_result(&refs, &[19..=22, 35..=38]); } @@ -78,17 +86,18 @@ mod tests { "#, ); - let refs = analysis.find_all_methods(pos.file_id).unwrap(); + let refs = super::find_all_methods(&analysis.db, pos.file_id); check_result(&refs, &[28..=34]); } - fn check_result(refs: &[FileRange], expected: &[RangeInclusive]) { + fn check_result(refs: &[(TextRange, Option)], expected: &[RangeInclusive]) { assert_eq!(refs.len(), expected.len()); - for (i, item) in refs.iter().enumerate() { + for (i, &(full, focus)) in refs.iter().enumerate() { let range = &expected[i]; - assert_eq!(TextSize::from(*range.start()), item.range.start()); - assert_eq!(TextSize::from(*range.end()), item.range.end()); + let item = focus.unwrap_or(full); + assert_eq!(TextSize::from(*range.start()), item.start()); + assert_eq!(TextSize::from(*range.end()), item.end()); } } } diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index 92ce26b422..d96827326c 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -232,8 +232,13 @@ pub(crate) fn token_as_doc_comment(doc_token: &SyntaxToken) -> Option TextSize::try_from(comment.prefix().len()).ok(), - ast::String(string) => doc_token.parent_ancestors().find_map(ast::Attr::cast) - .filter(|attr| attr.simple_name().as_deref() == Some("doc")).and_then(|_| string.open_quote_text_range().map(|it| it.len())), + ast::String(string) => { + doc_token.parent_ancestors().find_map(ast::Attr::cast).filter(|attr| attr.simple_name().as_deref() == Some("doc"))?; + if doc_token.parent_ancestors().find_map(ast::MacroCall::cast).filter(|mac| mac.path().and_then(|p| p.segment()?.name_ref()).as_ref().map(|n| n.text()).as_deref() == Some("include_str")).is_some() { + return None; + } + string.open_quote_text_range().map(|it| it.len()) + }, _ => None, } }).map(|prefix_len| DocCommentToken { prefix_len, doc_token: doc_token.clone() }) diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 36a648fe4a..f86ea61d15 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -95,6 +95,14 @@ fn try_lookup_include_path( if !matches!(&*name.text(), "include" | "include_str" | "include_bytes") { return None; } + + // Ignore non-built-in macros to account for shadowing + if let Some(it) = sema.resolve_macro_call(¯o_call) { + if !matches!(it.kind(sema.db), hir::MacroKind::BuiltIn) { + return None; + } + } + let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?; let size = sema.db.file_text(file_id).len().try_into().ok()?; Some(NavigationTarget { @@ -156,9 +164,6 @@ mod tests { fn check(ra_fixture: &str) { let (analysis, position, expected) = fixture::annotations(ra_fixture); let navs = 
analysis.goto_definition(position).unwrap().expect("no definition found").info; - if navs.is_empty() { - panic!("unresolved reference") - } let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start()); let navs = navs @@ -1348,6 +1353,10 @@ fn f(e: Enum) { check( r#" //- /main.rs + +#[rustc_builtin_macro] +macro_rules! include_str {} + fn main() { let str = include_str!("foo.txt$0"); } @@ -1357,6 +1366,42 @@ fn main() { "#, ); } + + #[test] + fn goto_doc_include_str() { + check( + r#" +//- /main.rs +#[rustc_builtin_macro] +macro_rules! include_str {} + +#[doc = include_str!("docs.md$0")] +struct Item; + +//- /docs.md +// docs +//^file +"#, + ); + } + + #[test] + fn goto_shadow_include() { + check( + r#" +//- /main.rs +macro_rules! include { + ("included.rs") => {} +} + +include!("included.rs$0"); + +//- /included.rs +// empty +"#, + ); + } + #[cfg(test)] mod goto_impl_of_trait_fn { use super::check; diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index 1bdd626f1e..540a115832 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -1373,6 +1373,22 @@ fn main() { ().func$0(); //^^^^ } +"#, + ); + } + + #[test] + fn test_assoc_type_highlighting() { + check( + r#" +trait Trait { + type Output; + // ^^^^^^ +} +impl Trait for () { + type Output$0 = (); + // ^^^^^^ +} "#, ); } diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index c5c50d88dd..d109c07691 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -346,7 +346,16 @@ pub(super) fn definition( Definition::Module(it) => label_and_docs(db, it), Definition::Function(it) => label_and_docs(db, it), Definition::Adt(it) => label_and_docs(db, it), - Definition::Variant(it) => label_and_docs(db, it), + Definition::Variant(it) => label_value_and_docs(db, it, |&it| { + if !it.parent_enum(db).is_data_carrying(db) { + match it.eval(db) { + Ok(x) => Some(format!("{}", x)), + Err(_) => it.value(db).map(|x| format!("{:?}", x)), + } + } else { + None + } + }), Definition::Const(it) => label_value_and_docs(db, it, |it| { let body = it.eval(db); match body { diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 4b8b47783d..eb997e6fef 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -698,6 +698,7 @@ fn hover_enum_variant() { check( r#" enum Option { + Some(T) /// The None variant Non$0e } @@ -3527,6 +3528,112 @@ impl Foo {} ); } +#[test] +fn hover_const_eval_variant() { + // show hex for <10 + check( + r#" +#[repr(u8)] +enum E { + /// This is a doc + A$0 = 1 << 3, +} +"#, + expect![[r#" + *A* + + ```rust + test::E + ``` + + ```rust + A = 8 + ``` + + --- + + This is a doc + "#]], + ); + // show hex for >10 + check( + r#" +#[repr(u8)] +enum E { + /// This is a doc + A$0 = (1 << 3) + (1 << 2), +} +"#, + expect![[r#" + *A* + + ```rust + test::E + ``` + + ```rust + A = 12 (0xC) + ``` + + --- + + This is a doc + "#]], + ); + // enums in const eval + check( + r#" +#[repr(u8)] +enum E { + A = 1, + /// This is a doc + B$0 = E::A as u8 + 1, +} +"#, + expect![[r#" + *B* + + ```rust + test::E + ``` + + ```rust + B = 2 + ``` + + --- + + This is a doc + "#]], + ); + // unspecified variant should increment by one + check( + r#" +#[repr(u8)] +enum E { + A = 4, + /// This is a doc + B$0, +} +"#, + expect![[r#" + *B* + + ```rust + test::E + ``` + + ```rust + B = 5 + ``` + + --- + + This is a doc + "#]], + ); +} + #[test] fn hover_const_eval() { // show hex for <10 @@ -3820,6 
+3927,35 @@ fn foo() { --- + This is a doc + "#]], + ); + check( + r#" +enum E { + /// This is a doc + A = 3, +} +fn foo(e: E) { + match e { + E::A$0 => (), + _ => () + } +} +"#, + expect![[r#" + *A* + + ```rust + test::E + ``` + + ```rust + A = 3 + ``` + + --- + This is a doc "#]], ); diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 93fcd7cad7..34d8bf67a3 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -176,12 +176,6 @@ impl fmt::Debug for InlayHintLabelPart { // * elided lifetimes // * compiler inserted reborrows // -// |=== -// | Editor | Action Name -// -// | VS Code | **rust-analyzer: Toggle inlay hints* -// |=== -// // image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[] pub(crate) fn inlay_hints( db: &RootDatabase, @@ -2030,7 +2024,14 @@ impl Vec { } impl IntoIterator for Vec { - type Item=T; + type Item = T; + type IntoIter = IntoIter; +} + +struct IntoIter {} + +impl Iterator for IntoIter { + type Item = T; } fn main() { diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index c1ef25b592..77fe0dbf55 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -31,7 +31,6 @@ mod highlight_related; mod expand_macro; mod extend_selection; mod file_structure; -mod fn_references; mod folding_ranges; mod goto_declaration; mod goto_definition; @@ -236,7 +235,7 @@ impl Analysis { Env::default(), Ok(Vec::new()), false, - CrateOrigin::CratesIo { repo: None }, + CrateOrigin::CratesIo { repo: None, name: None }, ); change.change_file(file_id, Some(Arc::new(text))); change.set_crate_graph(crate_graph); @@ -429,11 +428,6 @@ impl Analysis { self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope)) } - /// Finds all methods and free functions for the file. Does not return tests! - pub fn find_all_methods(&self, file_id: FileId) -> Cancellable> { - self.with_db(|db| fn_references::find_all_methods(db, file_id)) - } - /// Returns a short text describing element at position. 
pub fn hover( &self, diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index 600a526300..852a8fd837 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -253,10 +253,14 @@ pub(crate) fn def_to_moniker( }, kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import }, package_information: { - let name = krate.display_name(db)?.to_string(); - let (repo, version) = match krate.origin(db) { - CrateOrigin::CratesIo { repo } => (repo?, krate.version(db)?), + let (name, repo, version) = match krate.origin(db) { + CrateOrigin::CratesIo { repo, name } => ( + name.unwrap_or(krate.display_name(db)?.canonical_name().to_string()), + repo?, + krate.version(db)?, + ), CrateOrigin::Lang(lang) => ( + krate.display_name(db)?.canonical_name().to_string(), "https://github.com/rust-lang/rust/".to_string(), match lang { LangCrateOrigin::Other => { diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index 9395e914c4..e7d0a8be7f 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -87,9 +87,9 @@ fn punctuation( let parent = token.parent(); let parent_kind = parent.as_ref().map_or(EOF, SyntaxNode::kind); match (kind, parent_kind) { - (T![?], _) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow, + (T![?], TRY_EXPR) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow, (T![&], BIN_EXPR) => HlOperator::Bitwise.into(), - (T![&], _) => { + (T![&], REF_EXPR) => { let h = HlTag::Operator(HlOperator::Other).into(); let is_unsafe = parent .and_then(ast::RefExpr::cast) @@ -100,7 +100,9 @@ fn punctuation( h } } - (T![::] | T![->] | T![=>] | T![..] | T![=] | T![@] | T![.], _) => HlOperator::Other.into(), + (T![::] | T![->] | T![=>] | T![..] | T![..=] | T![=] | T![@] | T![.], _) => { + HlOperator::Other.into() + } (T![!], MACRO_CALL | MACRO_RULES) => HlPunct::MacroBang.into(), (T![!], NEVER_TYPE) => HlTag::BuiltinType.into(), (T![!], PREFIX_EXPR) => HlOperator::Logical.into(), @@ -129,7 +131,7 @@ fn punctuation( (T![+=] | T![-=] | T![*=] | T![/=] | T![%=], BIN_EXPR) => { Highlight::from(HlOperator::Arithmetic) | HlMod::Mutable } - (T![|] | T![&] | T![!] 
| T![^] | T![>>] | T![<<], BIN_EXPR) => HlOperator::Bitwise.into(), + (T![|] | T![&] | T![^] | T![>>] | T![<<], BIN_EXPR) => HlOperator::Bitwise.into(), (T![|=] | T![&=] | T![^=] | T![>>=] | T![<<=], BIN_EXPR) => { Highlight::from(HlOperator::Bitwise) | HlMod::Mutable } @@ -137,7 +139,6 @@ fn punctuation( (T![>] | T![<] | T![==] | T![>=] | T![<=] | T![!=], BIN_EXPR) => { HlOperator::Comparison.into() } - (_, PREFIX_EXPR | BIN_EXPR | RANGE_EXPR | RANGE_PAT | REST_PAT) => HlOperator::Other.into(), (_, ATTR) => HlTag::AttributeBracket.into(), (kind, _) => match kind { T!['['] | T![']'] => HlPunct::Bracket, diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html b/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html index e07fd3925c..9ed65fbc85 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html @@ -48,15 +48,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd impl foo { pub fn is_static() {} - pub fn is_not_static(&self) {} + pub fn is_not_static(&self) {} } trait t { fn t_is_static() {} - fn t_is_not_static(&self) {} + fn t_is_not_static(&self) {} } impl t for foo { pub fn is_static() {} - pub fn is_not_static(&self) {} + pub fn is_not_static(&self) {} } \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html index eef5baea98..18045f1f55 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html @@ -125,7 +125,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd /// ```sh /// echo 1 /// ``` - pub fn foo(&self) -> bool { + pub fn foo(&self) -> bool { true } } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html index a97802cbbd..9f2b1926b5 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html @@ -61,11 +61,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd } trait Bar { - fn bar(&self) -> i32; + fn bar(&self) -> i32; } impl Bar for Foo { - fn bar(&self) -> i32 { + fn bar(&self) -> i32 { self.x } } @@ -75,11 +75,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd f.baz(self) } - fn qux(&mut self) { + fn qux(&mut self) { self.x = 0; } - fn quop(&self) -> i32 { + fn quop(&self) -> i32 { self.x } } @@ -96,11 +96,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd f.baz(self) } - fn qux(&mut self) { + fn qux(&mut self) { self.x = 0; } - fn quop(&self) -> u32 { + fn quop(&self) -> u32 { self.x } } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html index ced7d22f03..abcd80c280 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html @@ -42,7 +42,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } -
fn fixture(ra_fixture: &str) {}
+
fn fixture(ra_fixture: &str) {}
 
 fn main() {
     fixture(r#"
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html b/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
index 2d85fc8c92..f98e0b1cda 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
@@ -45,8 +45,8 @@ pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 

 #[derive()]
 struct Foo<'a, 'b, 'c> where 'a: 'a, 'static: 'static {
-    field: &'a (),
-    field2: &'static (),
+    field: &'a (),
+    field2: &'static (),
 }
 impl<'a> Foo<'_, 'a, 'static>
 where
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index c627bc9b09..a626cda3fe 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -62,16 +62,16 @@ pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
         () => (
             $crate::panicking::panic("explicit panic")
         ),
-        ($msg:literal $(,)?) => (
+        ($msg:literal $(,)?) => (
             $crate::panicking::panic($msg)
         ),
         // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint.
-        ($msg:expr $(,)?) => (
+        ($msg:expr $(,)?) => (
             $crate::panicking::panic_str($msg)
         ),
         // Special-case the single-argument case for const_panic.
-        ("{}", $arg:expr $(,)?) => (
-            $crate::panicking::panic_display(&$arg)
+        ("{}", $arg:expr $(,)?) => (
+            $crate::panicking::panic_display(&$arg)
         ),
         ($fmt:expr, $($arg:tt)+) => (
             $crate::panicking::panic_fmt($crate::const_format_args!($fmt, $($arg)+))
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html b/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
index 0716bae751..1992bdc6ae 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
@@ -49,7 +49,7 @@ pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 }
 macro_rules! unsafe_deref {
     () => {
-        *(&() as *const ())
+        *(&() as *const ())
     };
 }
 static mut MUT_GLOBAL: Struct = Struct { field: 0 };
@@ -63,7 +63,7 @@ pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 
 struct Struct { field: i32 }
 impl Struct {
-    unsafe fn unsafe_method(&self) {}
+    unsafe fn unsafe_method(&self) {}
 }
 
 #[repr(packed)]
@@ -78,11 +78,11 @@ pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 fn unsafe_trait_bound<T: UnsafeTrait>(_: T) {}
 
 trait DoTheAutoref {
-    fn calls_autoref(&self);
+    fn calls_autoref(&self);
 }
 
 impl DoTheAutoref for u16 {
-    fn calls_autoref(&self) {}
+    fn calls_autoref(&self) {}
 }
 
 fn main() {
diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
index 44712f4191..243972b049 100644
--- a/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
+++ b/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
@@ -11,6 +11,8 @@ use proc_macro_api::ProcMacroKind;
 
 use super::PanicMessage;
 
+pub use ra_server::TokenStream;
+
 pub(crate) struct Abi {
     exported_macros: Vec,
 }
diff --git a/crates/proc-macro-srv/src/abis/mod.rs b/crates/proc-macro-srv/src/abis/mod.rs
index f7d3a30919..2f854bc159 100644
--- a/crates/proc-macro-srv/src/abis/mod.rs
+++ b/crates/proc-macro-srv/src/abis/mod.rs
@@ -32,8 +32,8 @@ mod abi_sysroot;
 include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
 
 // Used by `test/utils.rs`
-#[cfg(test)]
-pub(crate) use abi_1_63::TokenStream as TestTokenStream;
+#[cfg(all(test, feature = "sysroot-abi"))]
+pub(crate) use abi_sysroot::TokenStream as TestTokenStream;
 
 use super::dylib::LoadProcMacroDylibError;
 pub(crate) use abi_1_58::Abi as Abi_1_58;
@@ -144,3 +144,10 @@ impl Abi {
         }
     }
 }
+
+#[test]
+fn test_version_check() {
+    let path = paths::AbsPathBuf::assert(crate::proc_macro_test_dylib_path());
+    let info = proc_macro_api::read_dylib_info(&path).unwrap();
+    assert!(info.version.1 >= 50);
+}
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index 3679bfc43c..72a2dfe72d 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -20,6 +20,8 @@
 mod dylib;
 mod abis;
 
+pub mod cli;
+
 use std::{
     collections::{hash_map::Entry, HashMap},
     env,
@@ -149,7 +151,10 @@ impl EnvSnapshot {
     }
 }
 
-pub mod cli;
+#[cfg(all(feature = "sysroot-abi", test))]
+mod tests;
 
 #[cfg(test)]
-mod tests;
+pub fn proc_macro_test_dylib_path() -> std::path::PathBuf {
+    proc_macro_test::PROC_MACRO_TEST_LOCATION.into()
+}
diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs
index 6339d56d01..b46cdddcf6 100644
--- a/crates/proc-macro-srv/src/tests/mod.rs
+++ b/crates/proc-macro-srv/src/tests/mod.rs
@@ -2,10 +2,10 @@
 
 #[macro_use]
 mod utils;
-use expect_test::expect;
-use paths::AbsPathBuf;
 use utils::*;
 
+use expect_test::expect;
+
 #[test]
 fn test_derive_empty() {
     assert_expand("DeriveEmpty", r#"struct S;"#, expect![[r#"SUBTREE $"#]]);
@@ -157,10 +157,3 @@ fn list_test_macros() {
         DeriveError [CustomDerive]"#]]
     .assert_eq(&res);
 }
-
-#[test]
-fn test_version_check() {
-    let path = AbsPathBuf::assert(fixtures::proc_macro_test_dylib_path());
-    let info = proc_macro_api::read_dylib_info(&path).unwrap();
-    assert!(info.version.1 >= 50);
-}
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index f881fe8684..44b1b6588d 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,15 +1,9 @@
 //! utils used in proc-macro tests
 
-use crate::dylib;
-use crate::ProcMacroSrv;
 use expect_test::Expect;
 use std::str::FromStr;
 
-pub mod fixtures {
-    pub fn proc_macro_test_dylib_path() -> std::path::PathBuf {
-        proc_macro_test::PROC_MACRO_TEST_LOCATION.into()
-    }
-}
+use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
 
 fn parse_string(code: &str) -> Option {
     // This is a bit strange. We need to parse a string into a token stream into
@@ -30,7 +24,7 @@ pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, e
 }
 
 fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
-    let path = fixtures::proc_macro_test_dylib_path();
+    let path = proc_macro_test_dylib_path();
     let expander = dylib::Expander::new(&path).unwrap();
     let fixture = parse_string(input).unwrap();
     let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
@@ -40,7 +34,7 @@ fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect:
 }
 
 pub(crate) fn list() -> Vec {
-    let dylib_path = fixtures::proc_macro_test_dylib_path();
+    let dylib_path = proc_macro_test_dylib_path();
     let mut srv = ProcMacroSrv::default();
     let res = srv.list_macros(&dylib_path).unwrap();
     res.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()
diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs
index 837ea01619..32db42f1db 100644
--- a/crates/project-model/src/build_scripts.rs
+++ b/crates/project-model/src/build_scripts.rs
@@ -15,7 +15,7 @@ use rustc_hash::FxHashMap;
 use semver::Version;
 use serde::Deserialize;
 
-use crate::{cfg_flag::CfgFlag, CargoConfig, CargoWorkspace, Package};
+use crate::{cfg_flag::CfgFlag, CargoConfig, CargoFeatures, CargoWorkspace, Package};
 
 #[derive(Debug, Default, Clone, PartialEq, Eq)]
 pub struct WorkspaceBuildScripts {
@@ -49,7 +49,6 @@ impl WorkspaceBuildScripts {
 
         let mut cmd = Command::new(toolchain::cargo());
         cmd.envs(&config.extra_env);
-
         cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]);
 
         // --all-targets includes tests, benches and examples in addition to the
@@ -61,15 +60,18 @@ impl WorkspaceBuildScripts {
             cmd.args(&["--target", target]);
         }
 
-        if config.all_features {
-            cmd.arg("--all-features");
-        } else {
-            if config.no_default_features {
-                cmd.arg("--no-default-features");
+        match &config.features {
+            CargoFeatures::All => {
+                cmd.arg("--all-features");
             }
-            if !config.features.is_empty() {
-                cmd.arg("--features");
-                cmd.arg(config.features.join(" "));
+            CargoFeatures::Selected { features, no_default_features } => {
+                if *no_default_features {
+                    cmd.arg("--no-default-features");
+                }
+                if !features.is_empty() {
+                    cmd.arg("--features");
+                    cmd.arg(features.join(" "));
+                }
             }
         }
 
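Note on the hunk above: feature selection for the `cargo check` invocation now goes through a single enum instead of the three loose `CargoConfig` fields it replaces. A minimal sketch of the same flag construction, assuming the `CargoFeatures` shape introduced in cargo_workspace.rs below (the free function `feature_args` is hypothetical, for illustration only):

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum CargoFeatures {
    All,
    Selected {
        /// List of features to activate.
        features: Vec<String>,
        /// Do not activate the `default` feature.
        no_default_features: bool,
    },
}

/// Hypothetical helper mirroring the match in the hunk above: turn the
/// selection into `cargo check` command-line arguments.
fn feature_args(features: &CargoFeatures) -> Vec<String> {
    let mut args = Vec::new();
    match features {
        CargoFeatures::All => args.push("--all-features".to_owned()),
        CargoFeatures::Selected { features, no_default_features } => {
            if *no_default_features {
                args.push("--no-default-features".to_owned());
            }
            if !features.is_empty() {
                args.push("--features".to_owned());
                // Cargo accepts a single space-separated feature list.
                args.push(features.join(" "));
            }
        }
    }
    args
}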
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 736d80041b..8e690f1125 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -71,35 +71,40 @@ impl Default for UnsetTestCrates {
     }
 }
 
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum CargoFeatures {
+    All,
+    Selected {
+        /// List of features to activate.
+        features: Vec,
+        /// Do not activate the `default` feature.
+        no_default_features: bool,
+    },
+}
+
+impl Default for CargoFeatures {
+    fn default() -> Self {
+        CargoFeatures::Selected { features: vec![], no_default_features: false }
+    }
+}
+
 #[derive(Default, Clone, Debug, PartialEq, Eq)]
 pub struct CargoConfig {
-    /// Do not activate the `default` feature.
-    pub no_default_features: bool,
-
-    /// Activate all available features
-    pub all_features: bool,
-
     /// List of features to activate.
-    /// This will be ignored if `cargo_all_features` is true.
-    pub features: Vec,
-
+    pub features: CargoFeatures,
     /// rustc target
     pub target: Option,
-
-    /// Don't load sysroot crates (`std`, `core` & friends). Might be useful
-    /// when debugging isolated issues.
-    pub no_sysroot: bool,
-
+    /// Sysroot loading behavior
+    pub sysroot: Option,
     /// rustc private crate source
     pub rustc_source: Option,
-
     /// crates to disable `#[cfg(test)]` on
     pub unset_test_crates: UnsetTestCrates,
-
+    /// Invoke `cargo check` through the RUSTC_WRAPPER.
     pub wrap_rustc_in_build_scripts: bool,
-
+    /// The command to run instead of `cargo check` for building build scripts.
     pub run_build_script_command: Option>,
-
+    /// Extra env vars to set when invoking the cargo command
     pub extra_env: FxHashMap,
 }
 
@@ -143,7 +148,7 @@ pub struct PackageData {
     pub targets: Vec,
     /// Does this package come from the local filesystem (and is editable)?
     pub is_local: bool,
-    // Whether this package is a member of the workspace
+    /// Whether this package is a member of the workspace
     pub is_member: bool,
     /// List of packages this package depends on
     pub dependencies: Vec,
@@ -249,8 +254,8 @@ impl TargetKind {
     }
 }
 
+// Deserialize helper for the cargo metadata
 #[derive(Deserialize, Default)]
-// Deserialise helper for the cargo metadata
 struct PackageMetadata {
     #[serde(rename = "rust-analyzer")]
     rust_analyzer: Option,
@@ -266,22 +271,25 @@ impl CargoWorkspace {
         let target = config
             .target
             .clone()
-            .or_else(|| cargo_config_build_target(cargo_toml, config))
-            .or_else(|| rustc_discover_host_triple(cargo_toml, config));
+            .or_else(|| cargo_config_build_target(cargo_toml, &config.extra_env))
+            .or_else(|| rustc_discover_host_triple(cargo_toml, &config.extra_env));
 
         let mut meta = MetadataCommand::new();
         meta.cargo_path(toolchain::cargo());
         meta.manifest_path(cargo_toml.to_path_buf());
-        if config.all_features {
-            meta.features(CargoOpt::AllFeatures);
-        } else {
-            if config.no_default_features {
-                // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
-                // https://github.com/oli-obk/cargo_metadata/issues/79
-                meta.features(CargoOpt::NoDefaultFeatures);
+        match &config.features {
+            CargoFeatures::All => {
+                meta.features(CargoOpt::AllFeatures);
             }
-            if !config.features.is_empty() {
-                meta.features(CargoOpt::SomeFeatures(config.features.clone()));
+            CargoFeatures::Selected { features, no_default_features } => {
+                if *no_default_features {
+                    // FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures`
+                    // https://github.com/oli-obk/cargo_metadata/issues/79
+                    meta.features(CargoOpt::NoDefaultFeatures);
+                }
+                if !features.is_empty() {
+                    meta.features(CargoOpt::SomeFeatures(features.clone()));
+                }
             }
         }
         meta.current_dir(current_dir.as_os_str());
@@ -295,12 +303,9 @@ impl CargoWorkspace {
         // unclear whether cargo itself supports it.
         progress("metadata".to_string());
 
-        fn exec_with_env(
-            command: &cargo_metadata::MetadataCommand,
-            extra_env: &FxHashMap,
-        ) -> Result {
-            let mut command = command.cargo_command();
-            command.envs(extra_env);
+        (|| -> Result {
+            let mut command = meta.cargo_command();
+            command.envs(&config.extra_env);
             let output = command.output()?;
             if !output.status.success() {
                 return Err(cargo_metadata::Error::CargoMetadata {
@@ -312,12 +317,8 @@ impl CargoWorkspace {
                 .find(|line| line.starts_with('{'))
                 .ok_or(cargo_metadata::Error::NoJson)?;
             cargo_metadata::MetadataCommand::parse(stdout)
-        }
-
-        let meta = exec_with_env(&meta, &config.extra_env)
-            .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))?;
-
-        Ok(meta)
+        })()
+        .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))
     }
 
     pub fn new(mut meta: cargo_metadata::Metadata) -> CargoWorkspace {
@@ -386,32 +387,14 @@ impl CargoWorkspace {
         }
         let resolve = meta.resolve.expect("metadata executed with deps");
         for mut node in resolve.nodes {
-            let source = match pkg_by_id.get(&node.id) {
-                Some(&src) => src,
-                // FIXME: replace this and a similar branch below with `.unwrap`, once
-                // https://github.com/rust-lang/cargo/issues/7841
-                // is fixed and hits stable (around 1.43-is probably?).
-                None => {
-                    tracing::error!("Node id do not match in cargo metadata, ignoring {}", node.id);
-                    continue;
-                }
-            };
+            let &source = pkg_by_id.get(&node.id).unwrap();
             node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg));
-            for (dep_node, kind) in node
+            let dependencies = node
                 .deps
                 .iter()
-                .flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind)))
-            {
-                let pkg = match pkg_by_id.get(&dep_node.pkg) {
-                    Some(&pkg) => pkg,
-                    None => {
-                        tracing::error!(
-                            "Dep node id do not match in cargo metadata, ignoring {}",
-                            dep_node.pkg
-                        );
-                        continue;
-                    }
-                };
+                .flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind)));
+            for (dep_node, kind) in dependencies {
+                let &pkg = pkg_by_id.get(&dep_node.pkg).unwrap();
                 let dep = PackageDependency { name: dep_node.name.clone(), pkg, kind };
                 packages[source].dependencies.push(dep);
             }
@@ -456,10 +439,7 @@ impl CargoWorkspace {
                     found = true
                 }
                 self[pkg].dependencies.iter().find_map(|dep| {
-                    if &self[dep.pkg].manifest == manifest_path {
-                        return Some(self[pkg].manifest.clone());
-                    }
-                    None
+                    (&self[dep.pkg].manifest == manifest_path).then(|| self[pkg].manifest.clone())
                 })
             })
             .collect::>();
@@ -485,9 +465,12 @@ impl CargoWorkspace {
     }
 }
 
-fn rustc_discover_host_triple(cargo_toml: &ManifestPath, config: &CargoConfig) -> Option {
+fn rustc_discover_host_triple(
+    cargo_toml: &ManifestPath,
+    extra_env: &FxHashMap,
+) -> Option {
     let mut rustc = Command::new(toolchain::rustc());
-    rustc.envs(&config.extra_env);
+    rustc.envs(extra_env);
     rustc.current_dir(cargo_toml.parent()).arg("-vV");
     tracing::debug!("Discovering host platform by {:?}", rustc);
     match utf8_stdout(rustc) {
@@ -509,9 +492,12 @@ fn rustc_discover_host_triple(cargo_toml: &ManifestPath, config: &CargoConfig) -
     }
 }
 
-fn cargo_config_build_target(cargo_toml: &ManifestPath, config: &CargoConfig) -> Option {
+fn cargo_config_build_target(
+    cargo_toml: &ManifestPath,
+    extra_env: &FxHashMap,
+) -> Option {
     let mut cargo_config = Command::new(toolchain::cargo());
-    cargo_config.envs(&config.extra_env);
+    cargo_config.envs(extra_env);
     cargo_config
         .current_dir(cargo_toml.parent())
         .args(&["-Z", "unstable-options", "config", "get", "build.target"])
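Note on the metadata fetch above: the separate `exec_with_env` helper is replaced by an immediately invoked closure, so a single `with_context` call can wrap every failure point in one place. A small sketch of the same pattern with a made-up fallible body, assuming only anyhow's `Context` trait:

use anyhow::{Context, Result};

fn read_number(path: &str) -> Result<u64> {
    // Run the fallible steps inside a closure, then attach one context
    // message to whatever error escapes it.
    (|| -> Result<u64> {
        let text = std::fs::read_to_string(path)?;
        Ok(text.trim().parse()?)
    })()
    .with_context(|| format!("Failed to read a number from {:?}", path))
}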
diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs
index b81b7432f6..ce78ce8569 100644
--- a/crates/project-model/src/lib.rs
+++ b/crates/project-model/src/lib.rs
@@ -42,8 +42,8 @@ use rustc_hash::FxHashSet;
 pub use crate::{
     build_scripts::WorkspaceBuildScripts,
     cargo_workspace::{
-        CargoConfig, CargoWorkspace, Package, PackageData, PackageDependency, RustcSource, Target,
-        TargetData, TargetKind, UnsetTestCrates,
+        CargoConfig, CargoFeatures, CargoWorkspace, Package, PackageData, PackageDependency,
+        RustcSource, Target, TargetData, TargetKind, UnsetTestCrates,
     },
     manifest_path::ManifestPath,
     project_json::{ProjectJson, ProjectJsonData},
diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs
index 63d1d0ace9..5133a14d53 100644
--- a/crates/project-model/src/project_json.rs
+++ b/crates/project-model/src/project_json.rs
@@ -110,14 +110,17 @@ impl ProjectJson {
                 .collect::>(),
         }
     }
+
     /// Returns the number of crates in the project.
     pub fn n_crates(&self) -> usize {
         self.crates.len()
     }
+
     /// Returns an iterator over the crates in the project.
     pub fn crates(&self) -> impl Iterator + '_ {
         self.crates.iter().enumerate().map(|(idx, krate)| (CrateId(idx as u32), krate))
     }
+
     /// Returns the path to the project's root folder.
     pub fn path(&self) -> &AbsPath {
         &self.project_root
diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs
index 486cb143b8..3231361836 100644
--- a/crates/project-model/src/rustc_cfg.rs
+++ b/crates/project-model/src/rustc_cfg.rs
@@ -3,13 +3,14 @@
 use std::process::Command;
 
 use anyhow::Result;
+use rustc_hash::FxHashMap;
 
-use crate::{cfg_flag::CfgFlag, utf8_stdout, CargoConfig, ManifestPath};
+use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath};
 
 pub(crate) fn get(
     cargo_toml: Option<&ManifestPath>,
     target: Option<&str>,
-    config: &CargoConfig,
+    extra_env: &FxHashMap,
 ) -> Vec {
     let _p = profile::span("rustc_cfg::get");
     let mut res = Vec::with_capacity(6 * 2 + 1);
@@ -22,7 +23,7 @@ pub(crate) fn get(
         }
     }
 
-    match get_rust_cfgs(cargo_toml, target, config) {
+    match get_rust_cfgs(cargo_toml, target, extra_env) {
         Ok(rustc_cfgs) => {
             tracing::debug!(
                 "rustc cfgs found: {:?}",
@@ -42,11 +43,11 @@ pub(crate) fn get(
 fn get_rust_cfgs(
     cargo_toml: Option<&ManifestPath>,
     target: Option<&str>,
-    config: &CargoConfig,
+    extra_env: &FxHashMap,
 ) -> Result {
     if let Some(cargo_toml) = cargo_toml {
         let mut cargo_config = Command::new(toolchain::cargo());
-        cargo_config.envs(&config.extra_env);
+        cargo_config.envs(extra_env);
         cargo_config
             .current_dir(cargo_toml.parent())
             .args(&["-Z", "unstable-options", "rustc", "--print", "cfg"])
@@ -61,7 +62,7 @@ fn get_rust_cfgs(
     }
     // using unstable cargo features failed, fall back to using plain rustc
     let mut cmd = Command::new(toolchain::rustc());
-    cmd.envs(&config.extra_env);
+    cmd.envs(extra_env);
     cmd.args(&["--print", "cfg", "-O"]);
     if let Some(target) = target {
         cmd.args(&["--target", target]);
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 3282719fef..bc37e3d132 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -9,8 +9,9 @@ use std::{env, fs, iter, ops, path::PathBuf, process::Command};
 use anyhow::{format_err, Result};
 use la_arena::{Arena, Idx};
 use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashMap;
 
-use crate::{utf8_stdout, CargoConfig, ManifestPath};
+use crate::{utf8_stdout, ManifestPath};
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct Sysroot {
@@ -66,23 +67,37 @@ impl Sysroot {
     pub fn crates<'a>(&'a self) -> impl Iterator + ExactSizeIterator + 'a {
         self.crates.iter().map(|(id, _data)| id)
     }
+}
 
-    pub fn discover(dir: &AbsPath, config: &CargoConfig) -> Result {
+impl Sysroot {
+    pub fn discover(dir: &AbsPath, extra_env: &FxHashMap) -> Result {
         tracing::debug!("Discovering sysroot for {}", dir.display());
-        let sysroot_dir = discover_sysroot_dir(dir, config)?;
-        let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir, dir, config)?;
+        let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
+        let sysroot_src_dir =
+            discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?;
         let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
         Ok(res)
     }
 
-    pub fn discover_rustc(cargo_toml: &ManifestPath, config: &CargoConfig) -> Option {
+    pub fn discover_rustc(
+        cargo_toml: &ManifestPath,
+        extra_env: &FxHashMap,
+    ) -> Option {
         tracing::debug!("Discovering rustc source for {}", cargo_toml.display());
         let current_dir = cargo_toml.parent();
-        discover_sysroot_dir(current_dir, config)
+        discover_sysroot_dir(current_dir, extra_env)
             .ok()
             .and_then(|sysroot_dir| get_rustc_src(&sysroot_dir))
     }
 
+    pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result {
+        let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| {
+            format_err!("can't load standard library from sysroot {}", sysroot_dir.display())
+        })?;
+        let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
+        Ok(res)
+    }
+
     pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Result {
         let mut sysroot =
             Sysroot { root: sysroot_dir, src_root: sysroot_src_dir, crates: Arena::default() };
@@ -146,35 +161,43 @@ impl Sysroot {
     }
 }
 
-fn discover_sysroot_dir(current_dir: &AbsPath, config: &CargoConfig) -> Result {
+fn discover_sysroot_dir(
+    current_dir: &AbsPath,
+    extra_env: &FxHashMap,
+) -> Result {
     let mut rustc = Command::new(toolchain::rustc());
-    rustc.envs(&config.extra_env);
+    rustc.envs(extra_env);
     rustc.current_dir(current_dir).args(&["--print", "sysroot"]);
     tracing::debug!("Discovering sysroot by {:?}", rustc);
     let stdout = utf8_stdout(rustc)?;
     Ok(AbsPathBuf::assert(PathBuf::from(stdout)))
 }
 
-fn discover_sysroot_src_dir(
-    sysroot_path: &AbsPathBuf,
-    current_dir: &AbsPath,
-    config: &CargoConfig,
-) -> Result {
+fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option {
     if let Ok(path) = env::var("RUST_SRC_PATH") {
-        let path = AbsPathBuf::try_from(path.as_str())
-            .map_err(|path| format_err!("RUST_SRC_PATH must be absolute: {}", path.display()))?;
-        let core = path.join("core");
-        if fs::metadata(&core).is_ok() {
-            tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display());
-            return Ok(path);
+        if let Ok(path) = AbsPathBuf::try_from(path.as_str()) {
+            let core = path.join("core");
+            if fs::metadata(&core).is_ok() {
+                tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display());
+                return Some(path);
+            }
+            tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
+        } else {
+            tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring");
         }
-        tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
     }
 
     get_rust_src(sysroot_path)
+}
+fn discover_sysroot_src_dir_or_add_component(
+    sysroot_path: &AbsPathBuf,
+    current_dir: &AbsPath,
+    extra_env: &FxHashMap,
+) -> Result {
+    discover_sysroot_src_dir(sysroot_path)
         .or_else(|| {
             let mut rustup = Command::new(toolchain::rustup());
-            rustup.envs(&config.extra_env);
+            rustup.envs(extra_env);
             rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]);
             utf8_stdout(rustup).ok()?;
             get_rust_src(sysroot_path)
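Note on the sysroot changes above: source discovery is now split in two. Loading from an explicit sysroot path only consults RUST_SRC_PATH and the sysroot layout, while the `_or_add_component` variant may additionally shell out to `rustup component add rust-src`. A simplified std-only sketch of the RUST_SRC_PATH check (the helper name is hypothetical; the assumption is that the variable points at the library source root containing `core`):

use std::{env, fs, path::PathBuf};

fn src_dir_from_env() -> Option<PathBuf> {
    let path = PathBuf::from(env::var("RUST_SRC_PATH").ok()?);
    // Mirror the checks above: the path must be absolute and contain `core`.
    if path.is_absolute() && fs::metadata(path.join("core")).is_ok() {
        Some(path)
    } else {
        None
    }
}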
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index bea624bd54..e2444e2497 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -10,8 +10,8 @@ use paths::{AbsPath, AbsPathBuf};
 use serde::de::DeserializeOwned;
 
 use crate::{
-    CargoConfig, CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace,
-    Sysroot, WorkspaceBuildScripts,
+    CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot,
+    WorkspaceBuildScripts,
 };
 
 fn load_cargo(file: &str) -> CrateGraph {
@@ -101,7 +101,7 @@ fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph {
                 Some(FileId(counter))
             }
         },
-        &CargoConfig::default(),
+        &Default::default(),
     )
 }
 
@@ -185,6 +185,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -260,6 +263,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -335,6 +341,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -410,6 +419,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -477,6 +489,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
                             repo: Some(
                                 "https://github.com/rust-lang/libc",
                             ),
+                            name: Some(
+                                "libc",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -567,6 +582,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -644,6 +662,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -721,6 +742,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -798,6 +822,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -865,6 +892,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
                             repo: Some(
                                 "https://github.com/rust-lang/libc",
                             ),
+                            name: Some(
+                                "libc",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -946,6 +976,9 @@ fn cargo_hello_world_project_model() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -1023,6 +1056,9 @@ fn cargo_hello_world_project_model() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -1100,6 +1136,9 @@ fn cargo_hello_world_project_model() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -1177,6 +1216,9 @@ fn cargo_hello_world_project_model() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello-world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -1244,6 +1286,9 @@ fn cargo_hello_world_project_model() {
                             repo: Some(
                                 "https://github.com/rust-lang/libc",
                             ),
+                            name: Some(
+                                "libc",
+                            ),
                         },
                         is_proc_macro: false,
                     },
@@ -1804,6 +1849,9 @@ fn rust_project_hello_world_project_model() {
                         ),
                         origin: CratesIo {
                             repo: None,
+                            name: Some(
+                                "hello_world",
+                            ),
                         },
                         is_proc_macro: false,
                     },
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index bc4ab45dae..72ddf80928 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -21,8 +21,8 @@ use crate::{
     cfg_flag::CfgFlag,
     rustc_cfg,
     sysroot::SysrootCrate,
-    utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath, ProjectJson, ProjectManifest, Sysroot,
-    TargetKind, WorkspaceBuildScripts,
+    utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath, Package, ProjectJson, ProjectManifest,
+    Sysroot, TargetKind, WorkspaceBuildScripts,
 };
 
 /// A set of cfg-overrides per crate.
@@ -156,7 +156,11 @@ impl ProjectWorkspace {
                 })?;
                 let project_location = project_json.parent().to_path_buf();
                 let project_json = ProjectJson::new(&project_location, data);
-                ProjectWorkspace::load_inline(project_json, config.target.as_deref(), config)?
+                ProjectWorkspace::load_inline(
+                    project_json,
+                    config.target.as_deref(),
+                    &config.extra_env,
+                )?
             }
             ProjectManifest::CargoToml(cargo_toml) => {
                 let cargo_version = utf8_stdout({
@@ -184,20 +188,33 @@ impl ProjectWorkspace {
                 })?;
                 let cargo = CargoWorkspace::new(meta);
 
-                let sysroot = if config.no_sysroot {
-                    None
-                } else {
-                    Some(Sysroot::discover(cargo_toml.parent(), config).with_context(|| {
-                        format!(
+                let sysroot = match &config.sysroot {
+                    Some(RustcSource::Path(path)) => {
+                        Some(Sysroot::with_sysroot_dir(path.clone()).with_context(|| {
+                            format!(
+                                "Failed to find sysroot for Cargo.toml file {}.",
+                                cargo_toml.display()
+                            )
+                        })?)
+                    }
+                    Some(RustcSource::Discover) => Some(
+                        Sysroot::discover(cargo_toml.parent(), &config.extra_env).with_context(
+                            || {
+                                format!(
                             "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
                             cargo_toml.display()
                         )
-                    })?)
+                            },
+                        )?,
+                    ),
+                    None => None,
                 };
 
                 let rustc_dir = match &config.rustc_source {
                     Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(),
-                    Some(RustcSource::Discover) => Sysroot::discover_rustc(&cargo_toml, config),
+                    Some(RustcSource::Discover) => {
+                        Sysroot::discover_rustc(&cargo_toml, &config.extra_env)
+                    }
                     None => None,
                 };
 
@@ -217,7 +234,8 @@ impl ProjectWorkspace {
                     None => None,
                 };
 
-                let rustc_cfg = rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), config);
+                let rustc_cfg =
+                    rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), &config.extra_env);
 
                 let cfg_overrides = config.cfg_overrides();
                 ProjectWorkspace::Cargo {
@@ -238,7 +256,7 @@ impl ProjectWorkspace {
     pub fn load_inline(
         project_json: ProjectJson,
         target: Option<&str>,
-        config: &CargoConfig,
+        extra_env: &FxHashMap,
     ) -> Result {
         let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
             (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?),
@@ -260,7 +278,7 @@ impl ProjectWorkspace {
             (None, None) => None,
         };
 
-        let rustc_cfg = rustc_cfg::get(None, target, config);
+        let rustc_cfg = rustc_cfg::get(None, target, extra_env);
         Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg })
     }
 
@@ -270,9 +288,9 @@ impl ProjectWorkspace {
                 .first()
                 .and_then(|it| it.parent())
                 .ok_or_else(|| format_err!("No detached files to load"))?,
-            &CargoConfig::default(),
+            &Default::default(),
         )?;
-        let rustc_cfg = rustc_cfg::get(None, None, &CargoConfig::default());
+        let rustc_cfg = rustc_cfg::get(None, None, &Default::default());
         Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
     }
 
@@ -306,6 +324,13 @@ impl ProjectWorkspace {
     /// The return type contains the path and whether or not
     /// the root is a member of the current workspace
     pub fn to_roots(&self) -> Vec {
+        let mk_sysroot = |sysroot: Option<&Sysroot>| {
+            sysroot.map(|sysroot| PackageRoot {
+                is_local: false,
+                include: vec![sysroot.src_root().to_path_buf()],
+                exclude: Vec::new(),
+            })
+        };
         match self {
             ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
                 .crates()
@@ -316,13 +341,7 @@ impl ProjectWorkspace {
                 })
                 .collect::>()
                 .into_iter()
-                .chain(sysroot.as_ref().into_iter().flat_map(|sysroot| {
-                    sysroot.crates().map(move |krate| PackageRoot {
-                        is_local: false,
-                        include: vec![sysroot[krate].root.parent().to_path_buf()],
-                        exclude: Vec::new(),
-                    })
-                }))
+                .chain(mk_sysroot(sysroot.as_ref()))
                 .collect::>(),
             ProjectWorkspace::Cargo {
                 cargo,
@@ -371,11 +390,7 @@ impl ProjectWorkspace {
                         }
                         PackageRoot { is_local, include, exclude }
                     })
-                    .chain(sysroot.iter().map(|sysroot| PackageRoot {
-                        is_local: false,
-                        include: vec![sysroot.src_root().to_path_buf()],
-                        exclude: Vec::new(),
-                    }))
+                    .chain(mk_sysroot(sysroot.as_ref()))
                     .chain(rustc.iter().flat_map(|rustc| {
                         rustc.packages().map(move |krate| PackageRoot {
                             is_local: false,
@@ -392,11 +407,7 @@ impl ProjectWorkspace {
                     include: vec![detached_file.clone()],
                     exclude: Vec::new(),
                 })
-                .chain(sysroot.crates().map(|krate| PackageRoot {
-                    is_local: false,
-                    include: vec![sysroot[krate].root.parent().to_path_buf()],
-                    exclude: Vec::new(),
-                }))
+                .chain(mk_sysroot(Some(sysroot)))
                 .collect(),
         }
     }
@@ -419,7 +430,7 @@ impl ProjectWorkspace {
         &self,
         load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
         load: &mut dyn FnMut(&AbsPath) -> Option,
-        config: &CargoConfig,
+        extra_env: &FxHashMap,
     ) -> CrateGraph {
         let _p = profile::span("ProjectWorkspace::to_crate_graph");
 
@@ -430,7 +441,7 @@ impl ProjectWorkspace {
                 load,
                 project,
                 sysroot,
-                config,
+                extra_env,
             ),
             ProjectWorkspace::Cargo {
                 cargo,
@@ -469,7 +480,7 @@ fn project_json_to_crate_graph(
     load: &mut dyn FnMut(&AbsPath) -> Option,
     project: &ProjectJson,
     sysroot: &Option,
-    config: &CargoConfig,
+    extra_env: &FxHashMap,
 ) -> CrateGraph {
     let mut crate_graph = CrateGraph::default();
     let sysroot_deps = sysroot
@@ -497,7 +508,7 @@ fn project_json_to_crate_graph(
             let target_cfgs = match krate.target.as_deref() {
                 Some(target) => cfg_cache
                     .entry(target)
-                    .or_insert_with(|| rustc_cfg::get(None, Some(target), config)),
+                    .or_insert_with(|| rustc_cfg::get(None, Some(target), extra_env)),
                 None => &rustc_cfg,
             };
 
@@ -516,9 +527,15 @@ fn project_json_to_crate_graph(
                     proc_macro,
                     krate.is_proc_macro,
                     if krate.display_name.is_some() {
-                        CrateOrigin::CratesIo { repo: krate.repository.clone() }
+                        CrateOrigin::CratesIo {
+                            repo: krate.repository.clone(),
+                            name: krate
+                                .display_name
+                                .clone()
+                                .map(|n| n.canonical_name().to_string()),
+                        }
                     } else {
-                        CrateOrigin::CratesIo { repo: None }
+                        CrateOrigin::CratesIo { repo: None, name: None }
                     },
                 ),
             )
@@ -630,6 +647,8 @@ fn cargo_to_crate_graph(
                     lib_tgt = Some((crate_id, cargo[tgt].name.clone()));
                     pkg_to_lib_crate.insert(pkg, crate_id);
                 }
+                // Even crates that don't set proc-macro = true are allowed to depend on proc_macro
+                // (just none of the APIs work when called outside of a proc macro).
                 if let Some(proc_macro) = libproc_macro {
                     add_dep_with_prelude(
                         &mut crate_graph,
@@ -645,19 +664,19 @@ fn cargo_to_crate_graph(
         }
 
         // Set deps to the core, std and to the lib target of the current package
-        for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
+        for &(from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
             // Add sysroot deps first so that a lib target named `core` etc. can overwrite them.
-            public_deps.add(*from, &mut crate_graph);
+            public_deps.add(from, &mut crate_graph);
 
             if let Some((to, name)) = lib_tgt.clone() {
-                if to != *from && *kind != TargetKind::BuildScript {
+                if to != from && kind != TargetKind::BuildScript {
                     // (build script can not depend on its library target)
 
                     // For root projects with dashes in their name,
                     // cargo metadata does not do any normalization,
                     // so we do it ourselves currently
                     let name = CrateName::normalize_dashes(&name);
-                    add_dep(&mut crate_graph, *from, name, to);
+                    add_dep(&mut crate_graph, from, name, to);
                 }
             }
         }
@@ -669,17 +688,17 @@ fn cargo_to_crate_graph(
         for dep in cargo[pkg].dependencies.iter() {
             let name = CrateName::new(&dep.name).unwrap();
             if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
-                for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
-                    if dep.kind == DepKind::Build && *kind != TargetKind::BuildScript {
+                for &(from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
+                    if dep.kind == DepKind::Build && kind != TargetKind::BuildScript {
                         // Only build scripts may depend on build dependencies.
                         continue;
                     }
-                    if dep.kind != DepKind::Build && *kind == TargetKind::BuildScript {
+                    if dep.kind != DepKind::Build && kind == TargetKind::BuildScript {
                         // Build scripts may only depend on build dependencies.
                         continue;
                     }
 
-                    add_dep(&mut crate_graph, *from, name.clone(), to)
+                    add_dep(&mut crate_graph, from, name.clone(), to)
                 }
             }
         }
@@ -690,9 +709,9 @@ fn cargo_to_crate_graph(
         // and create dependencies on them for the crates which opt-in to that
         if let Some(rustc_workspace) = rustc {
             handle_rustc_crates(
+                &mut crate_graph,
                 rustc_workspace,
                 load,
-                &mut crate_graph,
                 &cfg_options,
                 override_cfg,
                 load_proc_macro,
@@ -736,14 +755,17 @@ fn detached_files_to_crate_graph(
         let detached_file_crate = crate_graph.add_crate_root(
             file_id,
             Edition::CURRENT,
-            display_name,
+            display_name.clone(),
             None,
             cfg_options.clone(),
             cfg_options.clone(),
             Env::default(),
             Ok(Vec::new()),
             false,
-            CrateOrigin::CratesIo { repo: None },
+            CrateOrigin::CratesIo {
+                repo: None,
+                name: display_name.map(|n| n.canonical_name().to_string()),
+            },
         );
 
         public_deps.add(detached_file_crate, &mut crate_graph);
@@ -752,16 +774,16 @@ fn detached_files_to_crate_graph(
 }
 
 fn handle_rustc_crates(
+    crate_graph: &mut CrateGraph,
     rustc_workspace: &CargoWorkspace,
     load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
-    crate_graph: &mut CrateGraph,
     cfg_options: &CfgOptions,
     override_cfg: &CfgOverrides,
     load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
-    pkg_to_lib_crate: &mut FxHashMap<la_arena::Idx<cargo_workspace::PackageData>, CrateId>,
+    pkg_to_lib_crate: &mut FxHashMap<Package, CrateId>,
     public_deps: &SysrootPublicDeps,
     cargo: &CargoWorkspace,
-    pkg_crates: &FxHashMap<la_arena::Idx<cargo_workspace::PackageData>, Vec<(CrateId, TargetKind)>>,
+    pkg_crates: &FxHashMap<Package, Vec<(CrateId, TargetKind)>>,
     build_scripts: &WorkspaceBuildScripts,
 ) {
     let mut rustc_pkg_crates = FxHashMap::default();
@@ -775,8 +797,8 @@ fn handle_rustc_crates(
         let mut queue = VecDeque::new();
         queue.push_back(root_pkg);
         while let Some(pkg) = queue.pop_front() {
-            // Don't duplicate packages if they are dependended on a diamond pattern
-            // N.B. if this line is omitted, we try to analyse over 4_800_000 crates
+            // Don't duplicate packages if they are depended on in a diamond pattern
+            // N.B. if this line is omitted, we try to analyze over 4_800_000 crates
             // which is not ideal
             if rustc_pkg_crates.contains_key(&pkg) {
                 continue;
@@ -919,7 +941,7 @@ fn add_target_crate_root(
         env,
         proc_macro,
         is_proc_macro,
-        CrateOrigin::CratesIo { repo: pkg.repository.clone() },
+        CrateOrigin::CratesIo { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) },
     )
 }
 
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 5392589186..a4e6550984 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -25,7 +25,7 @@ itertools = "0.10.3"
 scip = "0.1.1"
 lsp-types = { version = "0.93.1", features = ["proposed"] }
 parking_lot = "0.12.1"
-xflags = "0.2.4"
+xflags = "0.3.0"
 oorandom = "11.1.3"
 rustc-hash = "1.1.0"
 serde = { version = "1.0.137", features = ["derive"] }
@@ -34,7 +34,7 @@ threadpool = "1.8.1"
 rayon = "1.5.3"
 num_cpus = "1.13.1"
 mimalloc = { version = "0.1.29", default-features = false, optional = true }
-lsp-server = { version = "0.6.0", path = "../../lib/lsp-server" }
+lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" }
 tracing = "0.1.35"
 tracing-subscriber = { version = "0.3.14", default-features = false, features = [
     "env-filter",
@@ -87,7 +87,6 @@ jemalloc = ["jemallocator", "profile/jemalloc"]
 force-always-assert = ["always-assert/force"]
 in-rust-tree = [
     "proc-macro-srv/sysroot-abi",
-    "sourcegen/in-rust-tree",
     "ide/in-rust-tree",
     "syntax/in-rust-tree",
 ]
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index f6a6802972..eabfcf1944 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -37,16 +37,15 @@ fn main() {
         process::exit(code);
     }
 
-    if let Err(err) = try_main() {
+    let flags = flags::RustAnalyzer::from_env_or_exit();
+    if let Err(err) = try_main(flags) {
         tracing::error!("Unexpected error: {}", err);
         eprintln!("{}", err);
         process::exit(101);
     }
 }
 
-fn try_main() -> Result<()> {
-    let flags = flags::RustAnalyzer::from_env()?;
-
+fn try_main(flags: flags::RustAnalyzer) -> Result<()> {
     #[cfg(debug_assertions)]
     if flags.wait_dbg || env::var("RA_WAIT_DBG").is_ok() {
         #[allow(unused_mut)]
@@ -76,10 +75,6 @@ fn try_main() -> Result<()> {
                 println!("rust-analyzer {}", rust_analyzer::version());
                 return Ok(());
             }
-            if cmd.help {
-                println!("{}", flags::RustAnalyzer::HELP);
-                return Ok(());
-            }
             with_extra_thread("LspServer", run_server)?;
         }
         flags::RustAnalyzerCmd::ProcMacro(flags::ProcMacro) => {
diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs
index 1c39e9391a..e1675a030c 100644
--- a/crates/rust-analyzer/src/cargo_target_spec.rs
+++ b/crates/rust-analyzer/src/cargo_target_spec.rs
@@ -4,7 +4,7 @@ use std::mem;
 
 use cfg::{CfgAtom, CfgExpr};
 use ide::{FileId, RunnableKind, TestId};
-use project_model::{self, ManifestPath, TargetKind};
+use project_model::{self, CargoFeatures, ManifestPath, TargetKind};
 use vfs::AbsPathBuf;
 
 use crate::{global_state::GlobalStateSnapshot, Result};
@@ -35,41 +35,41 @@ impl CargoTargetSpec {
 
         match kind {
             RunnableKind::Test { test_id, attr } => {
-                args.push("test".to_string());
+                args.push("test".to_owned());
                 extra_args.push(test_id.to_string());
                 if let TestId::Path(_) = test_id {
-                    extra_args.push("--exact".to_string());
+                    extra_args.push("--exact".to_owned());
                 }
-                extra_args.push("--nocapture".to_string());
+                extra_args.push("--nocapture".to_owned());
                 if attr.ignore {
-                    extra_args.push("--ignored".to_string());
+                    extra_args.push("--ignored".to_owned());
                 }
             }
             RunnableKind::TestMod { path } => {
-                args.push("test".to_string());
-                extra_args.push(path.to_string());
-                extra_args.push("--nocapture".to_string());
+                args.push("test".to_owned());
+                extra_args.push(path.clone());
+                extra_args.push("--nocapture".to_owned());
             }
             RunnableKind::Bench { test_id } => {
-                args.push("bench".to_string());
+                args.push("bench".to_owned());
                 extra_args.push(test_id.to_string());
                 if let TestId::Path(_) = test_id {
-                    extra_args.push("--exact".to_string());
+                    extra_args.push("--exact".to_owned());
                 }
-                extra_args.push("--nocapture".to_string());
+                extra_args.push("--nocapture".to_owned());
             }
             RunnableKind::DocTest { test_id } => {
-                args.push("test".to_string());
-                args.push("--doc".to_string());
+                args.push("test".to_owned());
+                args.push("--doc".to_owned());
                 extra_args.push(test_id.to_string());
-                extra_args.push("--nocapture".to_string());
+                extra_args.push("--nocapture".to_owned());
             }
             RunnableKind::Bin => {
                 let subcommand = match spec {
                     Some(CargoTargetSpec { target_kind: TargetKind::Test, .. }) => "test",
                     _ => "run",
                 };
-                args.push(subcommand.to_string());
+                args.push(subcommand.to_owned());
             }
         }
 
@@ -82,29 +82,35 @@ impl CargoTargetSpec {
         };
 
         let cargo_config = snap.config.cargo();
-        if cargo_config.all_features {
-            args.push("--all-features".to_string());
 
-            for feature in target_required_features {
-                args.push("--features".to_string());
-                args.push(feature);
-            }
-        } else {
-            let mut features = Vec::new();
-            if let Some(cfg) = cfg.as_ref() {
-                required_features(cfg, &mut features);
+        match &cargo_config.features {
+            CargoFeatures::All => {
+                args.push("--all-features".to_owned());
+                for feature in target_required_features {
+                    args.push("--features".to_owned());
+                    args.push(feature);
+                }
             }
+            CargoFeatures::Selected { features, no_default_features } => {
+                let mut feats = Vec::new();
+                if let Some(cfg) = cfg.as_ref() {
+                    required_features(cfg, &mut feats);
+                }
 
-            features.extend(cargo_config.features);
-            features.extend(target_required_features);
+                feats.extend(features.iter().cloned());
+                feats.extend(target_required_features);
 
-            features.dedup();
-            for feature in features {
-                args.push("--features".to_string());
-                args.push(feature);
+                feats.dedup();
+                for feature in feats {
+                    args.push("--features".to_owned());
+                    args.push(feature);
+                }
+
+                if *no_default_features {
+                    args.push("--no-default-features".to_owned());
+                }
             }
         }
-
         Ok((args, extra_args))
     }
 
@@ -136,7 +142,7 @@ impl CargoTargetSpec {
     }
 
     pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) {
-        buf.push("--package".to_string());
+        buf.push("--package".to_owned());
         buf.push(self.package);
 
         // Can't mix --doc with other target flags
@@ -145,23 +151,23 @@ impl CargoTargetSpec {
         }
         match self.target_kind {
             TargetKind::Bin => {
-                buf.push("--bin".to_string());
+                buf.push("--bin".to_owned());
                 buf.push(self.target);
             }
             TargetKind::Test => {
-                buf.push("--test".to_string());
+                buf.push("--test".to_owned());
                 buf.push(self.target);
             }
             TargetKind::Bench => {
-                buf.push("--bench".to_string());
+                buf.push("--bench".to_owned());
                 buf.push(self.target);
             }
             TargetKind::Example => {
-                buf.push("--example".to_string());
+                buf.push("--example".to_owned());
                 buf.push(self.target);
             }
             TargetKind::Lib => {
-                buf.push("--lib".to_string());
+                buf.push("--lib".to_owned());
             }
             TargetKind::Other | TargetKind::BuildScript => (),
         }
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 80128e43fd..01fccc83e8 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -24,7 +24,7 @@ use ide_db::base_db::{
 use itertools::Itertools;
 use oorandom::Rand32;
 use profile::{Bytes, StopWatch};
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource};
 use rayon::prelude::*;
 use rustc_hash::FxHashSet;
 use stdx::format_to;
@@ -55,7 +55,10 @@ impl flags::AnalysisStats {
         };
 
         let mut cargo_config = CargoConfig::default();
-        cargo_config.no_sysroot = self.no_sysroot;
+        cargo_config.sysroot = match self.no_sysroot {
+            true => None,
+            false => Some(RustcSource::Discover),
+        };
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: !self.disable_build_scripts,
             with_proc_macro: !self.disable_proc_macros,
@@ -81,7 +84,7 @@ impl flags::AnalysisStats {
         };
 
         let (host, vfs, _proc_macro) =
-            load_workspace(workspace, &cargo_config, &load_cargo_config)?;
+            load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
         let db = host.raw_database();
         eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
         eprint!(" (metadata {}", metadata_time);
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index aa32654fbd..5bcc97e226 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -31,8 +31,6 @@ xflags::xflags! {
         default cmd lsp-server {
             /// Print version.
             optional --version
-            /// Print help.
-            optional -h, --help
 
             /// Dump a LSP config JSON schema.
             optional --print-config-schema
@@ -54,10 +52,10 @@ xflags::xflags! {
         }
 
         /// Batch typecheck project and print summary statistics
-        cmd analysis-stats
+        cmd analysis-stats {
             /// Directory with Cargo.toml.
             required path: PathBuf
-        {
+
             optional --output format: OutputFormat
 
             /// Randomize order in which crates, modules, and items are processed.
@@ -84,38 +82,37 @@ xflags::xflags! {
             optional --skip-inference
         }
 
-        cmd diagnostics
+        cmd diagnostics {
             /// Directory with Cargo.toml.
             required path: PathBuf
-        {
+
             /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
             optional --disable-build-scripts
             /// Don't use expand proc macros.
             optional --disable-proc-macros
         }
 
-        cmd ssr
+        cmd ssr {
             /// A structured search replace rule (`$a.foo($b) ==> bar($a, $b)`)
             repeated rule: SsrRule
-        {}
+        }
 
-        cmd search
+        cmd search {
             /// A structured search replace pattern (`$a.foo($b)`)
             repeated pattern: SsrPattern
-        {
             /// Prints debug information for any nodes with source exactly equal to snippet.
             optional --debug snippet: String
         }
 
         cmd proc-macro {}
 
-        cmd lsif
+        cmd lsif {
             required path: PathBuf
-        {}
+        }
 
-        cmd scip
+        cmd scip {
             required path: PathBuf
-        {}
+        }
     }
 }
 
@@ -150,7 +147,6 @@ pub enum RustAnalyzerCmd {
 #[derive(Debug)]
 pub struct LspServer {
     pub version: bool,
-    pub help: bool,
     pub print_config_schema: bool,
 }
 
@@ -218,7 +214,10 @@ pub struct Scip {
 }
 
 impl RustAnalyzer {
-    pub const HELP: &'static str = Self::HELP_;
+    #[allow(dead_code)]
+    pub fn from_env_or_exit() -> Self {
+        Self::from_env_or_exit_()
+    }
 
     #[allow(dead_code)]
     pub fn from_env() -> xflags::Result<Self> {
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs
index 88953096e2..5dba545b87 100644
--- a/crates/rust-analyzer/src/cli/load_cargo.rs
+++ b/crates/rust-analyzer/src/cli/load_cargo.rs
@@ -6,7 +6,7 @@ use anyhow::Result;
 use crossbeam_channel::{unbounded, Receiver};
 use hir::db::DefDatabase;
 use ide::{AnalysisHost, Change};
-use ide_db::base_db::CrateGraph;
+use ide_db::{base_db::CrateGraph, FxHashMap};
 use proc_macro_api::ProcMacroServer;
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
 use vfs::{loader::Handle, AbsPath, AbsPathBuf};
@@ -38,7 +38,7 @@ pub fn load_workspace_at(
         workspace.set_build_scripts(build_scripts)
     }
 
-    load_workspace(workspace, cargo_config, load_config)
+    load_workspace(workspace, &cargo_config.extra_env, load_config)
 }
 
 // Note: Since this function is used by external tools that use rust-analyzer as a library
@@ -48,7 +48,7 @@ pub fn load_workspace_at(
 // these tools need access to `ProjectWorkspace`, too, which `load_workspace_at` hides.
 pub fn load_workspace(
     ws: ProjectWorkspace,
-    cargo_config: &CargoConfig,
+    extra_env: &FxHashMap<String, String>,
     load_config: &LoadCargoConfig,
 ) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
     let (sender, receiver) = unbounded();
@@ -60,10 +60,26 @@ pub fn load_workspace(
     };
 
     let proc_macro_client = if load_config.with_proc_macro {
-        let path = AbsPathBuf::assert(std::env::current_exe()?);
-        Ok(ProcMacroServer::spawn(path, &["proc-macro"]).unwrap())
+        let mut path = AbsPathBuf::assert(std::env::current_exe()?);
+        let mut args = vec!["proc-macro"];
+
+        if let ProjectWorkspace::Cargo { sysroot, .. } | ProjectWorkspace::Json { sysroot, .. } =
+            &ws
+        {
+            if let Some(sysroot) = sysroot.as_ref() {
+                let standalone_server_name =
+                    format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
+                let server_path = sysroot.root().join("libexec").join(&standalone_server_name);
+                if std::fs::metadata(&server_path).is_ok() {
+                    path = server_path;
+                    args = vec![];
+                }
+            }
+        }
+
+        ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|e| e.to_string())
     } else {
-        Err("proc macro server not started".to_owned())
+        Err("proc macro server disabled".to_owned())
     };
 
     let crate_graph = ws.to_crate_graph(
@@ -76,7 +92,7 @@ pub fn load_workspace(
             vfs.set_file_contents(path.clone(), contents);
             vfs.file_id(&path)
         },
-        cargo_config,
+        extra_env,
     );
 
     let project_folders = ProjectFolders::new(&[ws], &[]);
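
The proc-macro server selection above prefers a standalone `rust-analyzer-proc-macro-srv` binary from the sysroot's `libexec/` directory and only falls back to re-running the current executable with the `proc-macro` subcommand. A minimal sketch of that lookup using plain `std::path` types rather than the real `AbsPathBuf`/`Sysroot` API:

```rust
use std::path::{Path, PathBuf};

// Return the binary to spawn and its arguments: the standalone server shipped
// in the sysroot's `libexec/` if present, otherwise the current executable
// invoked with the `proc-macro` subcommand.
fn proc_macro_server_cmd(sysroot_root: Option<&Path>) -> std::io::Result<(PathBuf, Vec<String>)> {
    if let Some(root) = sysroot_root {
        let standalone = root
            .join("libexec")
            .join(format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX));
        if standalone.exists() {
            return Ok((standalone, Vec::new()));
        }
    }
    Ok((std::env::current_exe()?, vec!["proc-macro".to_owned()]))
}

fn main() -> std::io::Result<()> {
    let (path, args) = proc_macro_server_cmd(None)?;
    println!("would spawn {} with args {:?}", path.display(), args);
    Ok(())
}
```
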
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index 79577bf78c..748306ea57 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -300,7 +300,7 @@ impl flags::Lsif {
         let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
 
         let (host, vfs, _proc_macro) =
-            load_workspace(workspace, &cargo_config, &load_cargo_config)?;
+            load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
         let db = host.raw_database();
         let analysis = host.analysis();
 
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index 05c16bb39e..2c29b3ee3a 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -40,7 +40,8 @@ impl flags::Scip {
 
         let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
 
-        let (host, vfs, _) = load_workspace(workspace, &cargo_config, &load_cargo_config)?;
+        let (host, vfs, _) =
+            load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
         let db = host.raw_database();
         let analysis = host.analysis();
 
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 9ef79e6f38..577a8640a4 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -7,7 +7,7 @@
 //! configure the server itself, feature flags are passed into analysis, and
 //! tweak things like automatic insertion of `()` in completions.
 
-use std::{ffi::OsString, fmt, iter, path::PathBuf};
+use std::{fmt, iter, path::PathBuf};
 
 use flycheck::FlycheckConfig;
 use ide::{
@@ -22,7 +22,8 @@ use ide_db::{
 use itertools::Itertools;
 use lsp_types::{ClientCapabilities, MarkupKind};
 use project_model::{
-    CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource, UnsetTestCrates,
+    CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource,
+    UnsetTestCrates,
 };
 use rustc_hash::{FxHashMap, FxHashSet};
 use serde::{de::DeserializeOwned, Deserialize};
@@ -90,11 +91,16 @@ config_data! {
         /// List of features to activate.
         ///
         /// Set this to `"all"` to pass `--all-features` to cargo.
-        cargo_features: CargoFeatures      = "[]",
+        cargo_features: CargoFeaturesDef      = "[]",
         /// Whether to pass `--no-default-features` to cargo.
         cargo_noDefaultFeatures: bool    = "false",
-        /// Internal config for debugging, disables loading of sysroot crates.
-        cargo_noSysroot: bool            = "false",
+        /// Relative path to the sysroot, or "discover" to try to automatically find it via
+        /// "rustc --print sysroot".
+        ///
+        /// Unsetting this disables sysroot loading.
+        ///
+        /// This option does not take effect until rust-analyzer is restarted.
+        cargo_sysroot: Option<String>    = "\"discover\"",
         /// Compilation target override (target triple).
         cargo_target: Option<String>     = "null",
         /// Unsets `#[cfg(test)]` for the specified crates.
@@ -109,12 +115,13 @@ config_data! {
         /// Extra arguments for `cargo check`.
         checkOnSave_extraArgs: Vec<String>               = "[]",
         /// Extra environment variables that will be set when running `cargo check`.
+        /// Extends `#rust-analyzer.cargo.extraEnv#`.
         checkOnSave_extraEnv: FxHashMap<String, String> = "{}",
         /// List of features to activate. Defaults to
         /// `#rust-analyzer.cargo.features#`.
         ///
         /// Set to `"all"` to pass `--all-features` to Cargo.
-        checkOnSave_features: Option<CargoFeatures>      = "null",
+        checkOnSave_features: Option<CargoFeaturesDef>      = "null",
         /// Whether to pass `--no-default-features` to Cargo. Defaults to
         /// `#rust-analyzer.cargo.noDefaultFeatures#`.
         checkOnSave_noDefaultFeatures: Option<bool>      = "null",
@@ -975,15 +982,17 @@ impl Config {
         self.data.lru_capacity
     }
 
-    pub fn proc_macro_srv(&self) -> Option<(AbsPathBuf, Vec<String>)> {
+    pub fn proc_macro_srv(&self) -> Option<(AbsPathBuf, /* is path explicitly set */ bool)> {
         if !self.data.procMacro_enable {
             return None;
         }
-        let path = match &self.data.procMacro_server {
-            Some(it) => self.root_path.join(it),
-            None => AbsPathBuf::assert(std::env::current_exe().ok()?),
-        };
-        Some((path, vec!["proc-macro".into()]))
+        Some(match &self.data.procMacro_server {
+            Some(it) => (
+                AbsPathBuf::try_from(it.clone()).unwrap_or_else(|path| self.root_path.join(path)),
+                true,
+            ),
+            None => (AbsPathBuf::assert(std::env::current_exe().ok()?), false),
+        })
     }
 
     pub fn dummy_replacements(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> {
@@ -1026,16 +1035,24 @@ impl Config {
                 RustcSource::Path(self.root_path.join(rustc_src))
             }
         });
+        let sysroot = self.data.cargo_sysroot.as_ref().map(|sysroot| {
+            if sysroot == "discover" {
+                RustcSource::Discover
+            } else {
+                RustcSource::Path(self.root_path.join(sysroot))
+            }
+        });
 
         CargoConfig {
-            no_default_features: self.data.cargo_noDefaultFeatures,
-            all_features: matches!(self.data.cargo_features, CargoFeatures::All),
             features: match &self.data.cargo_features {
-                CargoFeatures::All => vec![],
-                CargoFeatures::Listed(it) => it.clone(),
+                CargoFeaturesDef::All => CargoFeatures::All,
+                CargoFeaturesDef::Selected(features) => CargoFeatures::Selected {
+                    features: features.clone(),
+                    no_default_features: self.data.cargo_noDefaultFeatures,
+                },
             },
             target: self.data.cargo_target.clone(),
-            no_sysroot: self.data.cargo_noSysroot,
+            sysroot,
             rustc_source,
             unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()),
             wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper,
@@ -1086,7 +1103,7 @@ impl Config {
                     .unwrap_or(self.data.cargo_noDefaultFeatures),
                 all_features: matches!(
                     self.data.checkOnSave_features.as_ref().unwrap_or(&self.data.cargo_features),
-                    CargoFeatures::All
+                    CargoFeaturesDef::All
                 ),
                 features: match self
                     .data
@@ -1094,8 +1111,8 @@ impl Config {
                     .clone()
                     .unwrap_or_else(|| self.data.cargo_features.clone())
                 {
-                    CargoFeatures::All => vec![],
-                    CargoFeatures::Listed(it) => it,
+                    CargoFeaturesDef::All => vec![],
+                    CargoFeaturesDef::Selected(it) => it,
                 },
                 extra_args: self.data.checkOnSave_extraArgs.clone(),
                 extra_env: self.check_on_save_extra_env(),
@@ -1564,10 +1581,10 @@ enum CallableCompletionDef {
 
 #[derive(Deserialize, Debug, Clone)]
 #[serde(untagged)]
-enum CargoFeatures {
+enum CargoFeaturesDef {
     #[serde(deserialize_with = "de_unit_v::all")]
     All,
-    Listed(Vec<String>),
+    Selected(Vec<String>),
 }
 
 #[derive(Deserialize, Debug, Clone)]
@@ -1912,7 +1929,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
                 "Only show mutable reborrow hints."
             ]
         },
-        "CargoFeatures" => set! {
+        "CargoFeaturesDef" => set! {
             "anyOf": [
                 {
                     "type": "string",
@@ -1929,7 +1946,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
                 }
             ],
         },
-        "Option" => set! {
+        "Option" => set! {
             "anyOf": [
                 {
                     "type": "string",
diff --git a/crates/rust-analyzer/src/from_proto.rs b/crates/rust-analyzer/src/from_proto.rs
index 7bdd34d1f0..f2db9a2733 100644
--- a/crates/rust-analyzer/src/from_proto.rs
+++ b/crates/rust-analyzer/src/from_proto.rs
@@ -95,22 +95,22 @@ pub(crate) fn annotation(
 
     match resolve {
         lsp_ext::CodeLensResolveData::Impls(params) => {
-            let file_id =
-                snap.url_to_file_id(&params.text_document_position_params.text_document.uri)?;
+            let pos @ FilePosition { file_id, .. } =
+                file_position(snap, params.text_document_position_params)?;
             let line_index = snap.file_line_index(file_id)?;
 
             Ok(Annotation {
                 range: text_range(&line_index, code_lens.range)?,
-                kind: AnnotationKind::HasImpls { file_id, data: None },
+                kind: AnnotationKind::HasImpls { pos, data: None },
             })
         }
         lsp_ext::CodeLensResolveData::References(params) => {
-            let file_id = snap.url_to_file_id(&params.text_document.uri)?;
+            let pos @ FilePosition { file_id, .. } = file_position(snap, params)?;
             let line_index = snap.file_line_index(file_id)?;
 
             Ok(Annotation {
                 range: text_range(&line_index, code_lens.range)?,
-                kind: AnnotationKind::HasReferences { file_id, data: None },
+                kind: AnnotationKind::HasReferences { pos, data: None },
             })
         }
     }
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 92df4d70fd..000ff88e45 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -185,11 +185,48 @@ impl GlobalState {
         let (change, changed_files) = {
             let mut change = Change::new();
             let (vfs, line_endings_map) = &mut *self.vfs.write();
-            let changed_files = vfs.take_changes();
+            let mut changed_files = vfs.take_changes();
             if changed_files.is_empty() {
                 return false;
             }
 
+            // important: this needs to be a stable sort, the order between changes is relevant
+            // for the same file ids
+            changed_files.sort_by_key(|file| file.file_id);
+            // We need to fix up the changed events a bit, if we have a create or modify for a file
+            // id that is followed by a delete we actually no longer observe the file text from the
+            // create or modify which may cause problems later on
+            changed_files.dedup_by(|a, b| {
+                use vfs::ChangeKind::*;
+
+                if a.file_id != b.file_id {
+                    return false;
+                }
+
+                match (a.change_kind, b.change_kind) {
+                    // duplicate can be merged
+                    (Create, Create) | (Modify, Modify) | (Delete, Delete) => true,
+                    // just leave the create, modify is irrelevant
+                    (Create, Modify) => {
+                        std::mem::swap(a, b);
+                        true
+                    }
+                    // modify becomes irrelevant if the file is deleted
+                    (Modify, Delete) => true,
+                    // we should fully remove this occurrence,
+                    // but leaving just a delete works as well
+                    (Create, Delete) => true,
+                    // this is equivalent to a modify
+                    (Delete, Create) => {
+                        a.change_kind = Modify;
+                        true
+                    }
+                    // can't really occur
+                    (Modify, Create) => false,
+                    (Delete, Modify) => false,
+                }
+            });
+
             for file in &changed_files {
                 if let Some(path) = vfs.file_path(file.file_id).as_path() {
                     let path = path.to_path_buf();
@@ -317,6 +354,10 @@ impl GlobalState {
         }
     }
 
+    pub(crate) fn is_completed(&self, request: &lsp_server::Request) -> bool {
+        self.req_queue.incoming.is_completed(&request.id)
+    }
+
     fn send(&mut self, message: lsp_server::Message) {
         self.sender.send(message).unwrap()
     }
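
The dedup pass above collapses runs of VFS events for the same file so that a later delete cannot leave the loop below reading text recorded by an earlier create or modify. Restated as a standalone sketch with stand-in types (not the real `vfs` API), the intended merge table for two consecutive events, earlier first, is:

```rust
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum ChangeKind {
    Create,
    Modify,
    Delete,
}

// Merge two consecutive events for the same file, or return `None` to keep both.
fn merge(earlier: ChangeKind, later: ChangeKind) -> Option<ChangeKind> {
    use ChangeKind::*;
    match (earlier, later) {
        // Duplicates collapse to a single event.
        (Create, Create) | (Modify, Modify) | (Delete, Delete) => Some(earlier),
        // A modify right after a create is still just a create.
        (Create, Modify) => Some(Create),
        // Once the file is deleted, the earlier modify no longer matters.
        (Modify, Delete) => Some(Delete),
        // Create then delete could be dropped entirely; a lone delete is also fine.
        (Create, Delete) => Some(Delete),
        // Delete then re-create is observationally a modify.
        (Delete, Create) => Some(Modify),
        // These orderings should not occur; keep both events untouched.
        (Modify, Create) | (Delete, Modify) => None,
    }
}

fn coalesce(mut events: Vec<(FileId, ChangeKind)>) -> Vec<(FileId, ChangeKind)> {
    // Stable sort: events for the same file keep their relative order.
    events.sort_by_key(|&(file, _)| file);
    let mut out: Vec<(FileId, ChangeKind)> = Vec::new();
    for (file, kind) in events {
        if let Some(last) = out.last_mut() {
            if last.0 == file {
                if let Some(merged) = merge(last.1, kind) {
                    last.1 = merged;
                    continue;
                }
            }
        }
        out.push((file, kind));
    }
    out
}

fn main() {
    let events = vec![
        (FileId(2), ChangeKind::Delete),
        (FileId(1), ChangeKind::Create),
        (FileId(1), ChangeKind::Modify),
        (FileId(2), ChangeKind::Create),
    ];
    // Prints [(FileId(1), Create), (FileId(2), Modify)].
    println!("{:?}", coalesce(events));
}
```
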
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 3cfbc2e4e4..15922dac65 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -425,7 +425,9 @@ impl GlobalState {
     fn handle_task(&mut self, prime_caches_progress: &mut Vec<PrimeCachesProgress>, task: Task) {
         match task {
             Task::Response(response) => self.respond(response),
-            Task::Retry(req) => self.on_request(req),
+            // Only retry requests that haven't been cancelled. Otherwise we do unnecessary work.
+            Task::Retry(req) if !self.is_completed(&req) => self.on_request(req),
+            Task::Retry(_) => (),
             Task::Diagnostics(diagnostics_per_file) => {
                 for (file_id, diagnostics) in diagnostics_per_file {
                     self.diagnostics.set_native_diagnostics(file_id, diagnostics)
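
This guard works together with the `is_completed` helper added to `GlobalState` above and to the `lsp-server` request queue later in the patch: a retried request is re-dispatched only while it is still pending. A minimal sketch of that bookkeeping with stand-in types (not the real `lsp-server` API):

```rust
use std::collections::HashMap;

type RequestId = i32;

#[derive(Default)]
struct Incoming {
    // Requests that have been registered but not yet responded to or cancelled.
    pending: HashMap<RequestId, ()>,
}

impl Incoming {
    fn register(&mut self, id: RequestId) {
        self.pending.insert(id, ());
    }
    fn complete(&mut self, id: RequestId) -> Option<()> {
        self.pending.remove(&id)
    }
    fn is_completed(&self, id: &RequestId) -> bool {
        !self.pending.contains_key(id)
    }
}

fn main() {
    let mut incoming = Incoming::default();
    incoming.register(1);
    let _ = incoming.complete(1); // responded (or cancelled) in the meantime

    // A stale retry for request 1 arriving now is simply dropped.
    let retry_id = 1;
    if !incoming.is_completed(&retry_id) {
        println!("re-dispatching request {}", retry_id);
    } else {
        println!("request {} already completed; dropping retry", retry_id);
    }
}
```
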
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 4cf5de46c4..f873489394 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -143,7 +143,7 @@ impl GlobalState {
                             project_model::ProjectWorkspace::load_inline(
                                 it.clone(),
                                 cargo_config.target.as_deref(),
-                                &cargo_config,
+                                &cargo_config.extra_env,
                             )
                         }
                     })
@@ -306,41 +306,50 @@ impl GlobalState {
             format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
 
         if self.proc_macro_clients.is_empty() {
-            if let Some((path, args)) = self.config.proc_macro_srv() {
+            if let Some((path, path_manually_set)) = self.config.proc_macro_srv() {
                 tracing::info!("Spawning proc-macro servers");
                 self.proc_macro_clients = self
                     .workspaces
                     .iter()
                     .map(|ws| {
-                        let mut args = args.clone();
-                        let mut path = path.clone();
-
-                        if let ProjectWorkspace::Cargo { sysroot, .. }
-                        | ProjectWorkspace::Json { sysroot, .. } = ws
-                        {
-                            tracing::debug!("Found a cargo workspace...");
-                            if let Some(sysroot) = sysroot.as_ref() {
-                                tracing::debug!("Found a cargo workspace with a sysroot...");
-                                let server_path =
-                                    sysroot.root().join("libexec").join(&standalone_server_name);
-                                if std::fs::metadata(&server_path).is_ok() {
-                                    tracing::debug!(
-                                        "And the server exists at {}",
-                                        server_path.display()
-                                    );
-                                    path = server_path;
-                                    args = vec![];
-                                } else {
-                                    tracing::debug!(
-                                        "And the server does not exist at {}",
-                                        server_path.display()
-                                    );
+                        let (path, args) = if path_manually_set {
+                            tracing::debug!(
+                                "Pro-macro server path explicitly set: {}",
+                                path.display()
+                            );
+                            (path.clone(), vec![])
+                        } else {
+                            let mut sysroot_server = None;
+                            if let ProjectWorkspace::Cargo { sysroot, .. }
+                            | ProjectWorkspace::Json { sysroot, .. } = ws
+                            {
+                                if let Some(sysroot) = sysroot.as_ref() {
+                                    let server_path = sysroot
+                                        .root()
+                                        .join("libexec")
+                                        .join(&standalone_server_name);
+                                    if std::fs::metadata(&server_path).is_ok() {
+                                        tracing::debug!(
+                                            "Sysroot proc-macro server exists at {}",
+                                            server_path.display()
+                                        );
+                                        sysroot_server = Some(server_path);
+                                    } else {
+                                        tracing::debug!(
+                                            "Sysroot proc-macro server does not exist at {}",
+                                            server_path.display()
+                                        );
+                                    }
                                 }
                             }
-                        }
+                            sysroot_server.map_or_else(
+                                || (path.clone(), vec!["proc-macro".to_owned()]),
+                                |path| (path, vec![]),
+                            )
+                        };
 
                         tracing::info!(?args, "Using proc-macro server at {}", path.display(),);
-                        ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| {
+                        ProcMacroServer::spawn(path.clone(), args).map_err(|err| {
                             let error = format!(
                                 "Failed to run proc-macro server from path {}, error: {:?}",
                                 path.display(),
@@ -402,7 +411,7 @@ impl GlobalState {
                 crate_graph.extend(ws.to_crate_graph(
                     &mut load_proc_macro,
                     &mut load,
-                    &self.config.cargo(),
+                    &self.config.cargo().extra_env,
                 ));
             }
             crate_graph
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index 1de801e23e..5936454a7c 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -1177,13 +1177,13 @@ pub(crate) fn code_lens(
                 })
             }
         }
-        AnnotationKind::HasImpls { file_id, data } => {
+        AnnotationKind::HasImpls { pos: file_range, data } => {
             if !client_commands_config.show_reference {
                 return Ok(());
             }
-            let line_index = snap.file_line_index(file_id)?;
+            let line_index = snap.file_line_index(file_range.file_id)?;
             let annotation_range = range(&line_index, annotation.range);
-            let url = url(snap, file_id);
+            let url = url(snap, file_range.file_id);
 
             let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };
 
@@ -1221,13 +1221,13 @@ pub(crate) fn code_lens(
                 data: Some(to_value(lsp_ext::CodeLensResolveData::Impls(goto_params)).unwrap()),
             })
         }
-        AnnotationKind::HasReferences { file_id, data } => {
+        AnnotationKind::HasReferences { pos: file_range, data } => {
             if !client_commands_config.show_reference {
                 return Ok(());
             }
-            let line_index = snap.file_line_index(file_id)?;
+            let line_index = snap.file_line_index(file_range.file_id)?;
             let annotation_range = range(&line_index, annotation.range);
-            let url = url(snap, file_id);
+            let url = url(snap, file_range.file_id);
 
             let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };
 
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index 4cc46af1b1..fa55f7d90c 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -18,7 +18,6 @@ mod tidy;
 
 use std::{collections::HashMap, path::PathBuf, time::Instant};
 
-use expect_test::expect;
 use lsp_types::{
     notification::DidOpenTextDocument,
     request::{
@@ -60,7 +59,7 @@ use std::collections::Spam;
 "#,
     )
     .with_config(serde_json::json!({
-        "cargo": { "noSysroot": false }
+        "cargo": { "sysroot": "discover" }
     }))
     .server()
     .wait_until_workspace_is_loaded();
@@ -615,7 +614,7 @@ fn main() {{}}
         librs, libs
     ))
     .with_config(serde_json::json!({
-        "cargo": { "noSysroot": false }
+        "cargo": { "sysroot": "discover" }
     }))
     .server()
     .wait_until_workspace_is_loaded();
@@ -743,7 +742,7 @@ fn main() {
             "buildScripts": {
                 "enable": true
             },
-            "noSysroot": true,
+            "sysroot": null,
         }
     }))
     .server()
@@ -821,7 +820,10 @@ fn main() {
 }
 
 #[test]
+// FIXME: Re-enable once we can run proc-macro tests on rust-lang/rust-analyzer again
+#[cfg(any())]
 fn resolve_proc_macro() {
+    use expect_test::expect;
     if skip_slow_tests() {
         return;
     }
@@ -898,7 +900,7 @@ pub fn foo(_input: TokenStream) -> TokenStream {
             "buildScripts": {
                 "enable": true
             },
-            "noSysroot": true,
+            "sysroot": null,
         },
         "procMacro": {
             "enable": true,
diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs
index 4fa88c3c6d..7257445dab 100644
--- a/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -34,7 +34,7 @@ impl<'a> Project<'a> {
             config: serde_json::json!({
                 "cargo": {
                     // Loading standard library is costly, let's ignore it by default
-                    "noSysroot": true,
+                    "sysroot": null,
                     // Can't use test binary as rustc wrapper.
                     "buildScripts": {
                         "useRustcWrapper": false
diff --git a/crates/sourcegen/Cargo.toml b/crates/sourcegen/Cargo.toml
index a84110d940..e75867e2d8 100644
--- a/crates/sourcegen/Cargo.toml
+++ b/crates/sourcegen/Cargo.toml
@@ -11,6 +11,3 @@ doctest = false
 
 [dependencies]
 xshell = "0.2.2"
-
-[features]
-in-rust-tree = []
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index bb92c51e9a..fe82aa9072 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -873,3 +873,33 @@ impl ast::MatchGuard {
         support::child(&self.syntax)
     }
 }
+
+impl From<ast::Item> for ast::AnyHasAttrs {
+    fn from(node: ast::Item) -> Self {
+        Self::new(node)
+    }
+}
+
+impl From<ast::AssocItem> for ast::AnyHasAttrs {
+    fn from(node: ast::AssocItem) -> Self {
+        Self::new(node)
+    }
+}
+
+impl From<ast::Variant> for ast::AnyHasAttrs {
+    fn from(node: ast::Variant) -> Self {
+        Self::new(node)
+    }
+}
+
+impl From<ast::RecordField> for ast::AnyHasAttrs {
+    fn from(node: ast::RecordField) -> Self {
+        Self::new(node)
+    }
+}
+
+impl From<ast::TupleField> for ast::AnyHasAttrs {
+    fn from(node: ast::TupleField) -> Self {
+        Self::new(node)
+    }
+}
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index 6df29db474..10386b5b7b 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -37,6 +37,7 @@
 //!     add:
 //!     as_ref: sized
 //!     drop:
+//!     generator: pin
 
 pub mod marker {
     // region:sized
@@ -182,6 +183,19 @@ pub mod ops {
             type Target: ?Sized;
             fn deref(&self) -> &Self::Target;
         }
+
+        impl<T: ?Sized> Deref for &T {
+            type Target = T;
+            fn deref(&self) -> &T {
+                loop {}
+            }
+        }
+        impl<T: ?Sized> Deref for &mut T {
+            type Target = T;
+            fn deref(&self) -> &T {
+                loop {}
+            }
+        }
         // region:deref_mut
         #[lang = "deref_mut"]
         pub trait DerefMut: Deref {
@@ -347,6 +361,27 @@ pub mod ops {
         fn add(self, rhs: Rhs) -> Self::Output;
     }
     // endregion:add
+
+    // region:generator
+    mod generator {
+        use crate::pin::Pin;
+
+        #[lang = "generator"]
+        pub trait Generator<R = ()> {
+            type Yield;
+            #[lang = "generator_return"]
+            type Return;
+            fn resume(self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return>;
+        }
+
+        #[lang = "generator_state"]
+        pub enum GeneratorState<Y, R> {
+            Yielded(Y),
+            Complete(R),
+        }
+    }
+    pub use self::generator::{Generator, GeneratorState};
+    // endregion:generator
 }
 
 // region:eq
@@ -455,6 +490,19 @@ pub mod pin {
     pub struct Pin<P> {
         pointer: P,
     }
+    impl<P> Pin<P> {
+        pub fn new(pointer: P) -> Pin<P> {
+            loop {}
+        }
+    }
+    // region:deref
+    impl<P: crate::ops::Deref> crate::ops::Deref for Pin<P> {
+        type Target = P::Target;
+        fn deref(&self) -> &P::Target {
+            loop {}
+        }
+    }
+    // endregion:deref
 }
 // endregion:pin
 
diff --git a/docs/dev/README.md b/docs/dev/README.md
index c7f152acc2..4ac75b4bbf 100644
--- a/docs/dev/README.md
+++ b/docs/dev/README.md
@@ -98,7 +98,7 @@ After I am done with the fix, I use `cargo xtask install --client` to try the ne
 If I need to fix something in the `rust-analyzer` crate, I feel sad because it's on the boundary between the two processes, and working there is slow.
 I usually just `cargo xtask install --server` and poke changes from my live environment.
 Note that this uses `--release`, which is usually faster overall, because loading stdlib into debug version of rust-analyzer takes a lot of time.
-To speed things up, sometimes I open a temporary hello-world project which has `"rust-analyzer.cargo.noSysroot": true` in `.code/settings.json`.
+To speed things up, sometimes I open a temporary hello-world project which has `"rust-analyzer.cargo.sysroot": null` in `.code/settings.json`.
 This flag causes rust-analyzer to skip loading the sysroot, which greatly reduces the amount of things rust-analyzer needs to do, and makes printf's more useful.
 
 Note that you should only use the `eprint!` family of macros for debugging: stdout is used for LSP communication, and `print!` would break it.
diff --git a/docs/dev/guide.md b/docs/dev/guide.md
index 808eb5d10b..c9ff0b6c29 100644
--- a/docs/dev/guide.md
+++ b/docs/dev/guide.md
@@ -40,8 +40,8 @@ terms of files and offsets, and **not** in terms of Rust concepts like structs,
 traits, etc. The "typed" API with Rust specific types is slightly lower in the
 stack, we'll talk about it later.
 
-[`AnalysisHost`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L265-L284
-[`Analysis`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L291-L478
+[`AnalysisHost`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/lib.rs#L265-L284
+[`Analysis`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/lib.rs#L291-L478
 
 The reason for this separation of `Analysis` and `AnalysisHost` is that we want
 to apply changes "uniquely", but we might also want to fork an `Analysis` and send it to
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index 996d4c023d..acf0aaea85 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -64,10 +64,15 @@ Set this to `"all"` to pass `--all-features` to cargo.
 --
 Whether to pass `--no-default-features` to cargo.
 --
-[[rust-analyzer.cargo.noSysroot]]rust-analyzer.cargo.noSysroot (default: `false`)::
+[[rust-analyzer.cargo.sysroot]]rust-analyzer.cargo.sysroot (default: `"discover"`)::
 +
 --
-Internal config for debugging, disables loading of sysroot crates.
+Relative path to the sysroot, or "discover" to try to automatically find it via
+"rustc --print sysroot".
+
+Unsetting this disables sysroot loading.
+
+This option does not take effect until rust-analyzer is restarted.
 --
 [[rust-analyzer.cargo.target]]rust-analyzer.cargo.target (default: `null`)::
 +
@@ -103,6 +108,7 @@ Extra arguments for `cargo check`.
 +
 --
 Extra environment variables that will be set when running `cargo check`.
+Extends `#rust-analyzer.cargo.extraEnv#`.
 --
 [[rust-analyzer.checkOnSave.features]]rust-analyzer.checkOnSave.features (default: `null`)::
 +
diff --git a/editors/code/package.json b/editors/code/package.json
index 94b41c049b..f1dd3aa79f 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -468,10 +468,13 @@
                     "default": false,
                     "type": "boolean"
                 },
-                "rust-analyzer.cargo.noSysroot": {
-                    "markdownDescription": "Internal config for debugging, disables loading of sysroot crates.",
-                    "default": false,
-                    "type": "boolean"
+                "rust-analyzer.cargo.sysroot": {
+                    "markdownDescription": "Relative path to the sysroot, or \"discover\" to try to automatically find it via\n\"rustc --print sysroot\".\n\nUnsetting this disables sysroot loading.\n\nThis option does not take effect until rust-analyzer is restarted.",
+                    "default": "discover",
+                    "type": [
+                        "null",
+                        "string"
+                    ]
                 },
                 "rust-analyzer.cargo.target": {
                     "markdownDescription": "Compilation target override (target triple).",
@@ -515,7 +518,7 @@
                     }
                 },
                 "rust-analyzer.checkOnSave.extraEnv": {
-                    "markdownDescription": "Extra environment variables that will be set when running `cargo check`.",
+                    "markdownDescription": "Extra environment variables that will be set when running `cargo check`.\nExtends `#rust-analyzer.cargo.extraEnv#`.",
                     "default": {},
                     "type": "object"
                 },
diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml
index 204d120d07..b236b156cf 100644
--- a/lib/lsp-server/Cargo.toml
+++ b/lib/lsp-server/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "lsp-server"
-version = "0.6.0"
+version = "0.7.0"
 description = "Generic LSP server scaffold."
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"
@@ -8,9 +8,9 @@ edition = "2021"
 
 [dependencies]
 log = "0.4.17"
-serde_json = "1.0.81"
-serde = { version = "1.0.137", features = ["derive"] }
-crossbeam-channel = "0.5.5"
+serde_json = "1.0.85"
+serde = { version = "1.0.144", features = ["derive"] }
+crossbeam-channel = "0.5.6"
 
 [dev-dependencies]
-lsp-types = "0.93.0"
+lsp-types = "0.93.1"
diff --git a/lib/lsp-server/src/msg.rs b/lib/lsp-server/src/msg.rs
index 97e5bd35ce..b241561f9c 100644
--- a/lib/lsp-server/src/msg.rs
+++ b/lib/lsp-server/src/msg.rs
@@ -98,7 +98,7 @@ pub struct ResponseError {
 }
 
 #[derive(Clone, Copy, Debug)]
-#[allow(unused)]
+#[non_exhaustive]
 pub enum ErrorCode {
     // Defined by JSON RPC:
     ParseError = -32700,
@@ -135,6 +135,14 @@ pub enum ErrorCode {
     ///
     /// @since 3.17.0
     ServerCancelled = -32802,
+
+    /// A request failed but it was syntactically correct, e.g the
+    /// method name was known and the parameters were valid. The error
+    /// message should contain human readable information about why
+    /// the request failed.
+    ///
+    /// @since 3.17.0
+    RequestFailed = -32803,
 }
 
 #[derive(Debug, Serialize, Deserialize, Clone)]
diff --git a/lib/lsp-server/src/req_queue.rs b/lib/lsp-server/src/req_queue.rs
index 1f3d447153..e5f19be20b 100644
--- a/lib/lsp-server/src/req_queue.rs
+++ b/lib/lsp-server/src/req_queue.rs
@@ -35,6 +35,7 @@ impl<I> Incoming<I> {
     pub fn register(&mut self, id: RequestId, data: I) {
         self.pending.insert(id, data);
     }
+
     pub fn cancel(&mut self, id: RequestId) -> Option<Response> {
         let _data = self.complete(id.clone())?;
         let error = ResponseError {
@@ -44,9 +45,14 @@ impl<I> Incoming<I> {
         };
         Some(Response { id, result: None, error: Some(error) })
     }
+
     pub fn complete(&mut self, id: RequestId) -> Option<I> {
         self.pending.remove(&id)
     }
+
+    pub fn is_completed(&self, id: &RequestId) -> bool {
+        !self.pending.contains_key(id)
+    }
 }
 
 impl<O> Outgoing<O> {
@@ -56,6 +62,7 @@ impl<O> Outgoing<O> {
         self.next_id += 1;
         Request::new(id, method, params)
     }
+
     pub fn complete(&mut self, id: RequestId) -> Option<O> {
         self.pending.remove(&id)
     }
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml
index 95d44e9b9d..14816912b7 100644
--- a/xtask/Cargo.toml
+++ b/xtask/Cargo.toml
@@ -11,5 +11,5 @@ anyhow = "1.0.57"
 flate2 = "1.0.24"
 write-json = "0.1.2"
 xshell = "0.2.2"
-xflags = "0.2.4"
+xflags = "0.3.0"
 # Avoid adding more dependencies to this crate
diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs
index 993c64ccea..0fce488983 100644
--- a/xtask/src/flags.rs
+++ b/xtask/src/flags.rs
@@ -7,10 +7,6 @@ xflags::xflags! {
 
     /// Run custom build command.
     cmd xtask {
-        default cmd help {
-            /// Print help information.
-            optional -h, --help
-        }
 
         /// Install rust-analyzer server or editor plugin.
         cmd install {
@@ -42,9 +38,9 @@ xflags::xflags! {
             optional --dry-run
         }
         /// Builds a benchmark version of rust-analyzer and puts it into `./target`.
-        cmd bb
+        cmd bb {
             required suffix: String
-        {}
+        }
     }
 }
 
@@ -58,7 +54,6 @@ pub struct Xtask {
 
 #[derive(Debug)]
 pub enum XtaskCmd {
-    Help(Help),
     Install(Install),
     FuzzTests(FuzzTests),
     Release(Release),
@@ -68,11 +63,6 @@ pub enum XtaskCmd {
     Bb(Bb),
 }
 
-#[derive(Debug)]
-pub struct Help {
-    pub help: bool,
-}
-
 #[derive(Debug)]
 pub struct Install {
     pub client: bool,
@@ -111,7 +101,10 @@ pub struct Bb {
 }
 
 impl Xtask {
-    pub const HELP: &'static str = Self::HELP_;
+    #[allow(dead_code)]
+    pub fn from_env_or_exit() -> Self {
+        Self::from_env_or_exit_()
+    }
 
     #[allow(dead_code)]
     pub fn from_env() -> xflags::Result<Self> {
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index 335ac324a5..a37f469adc 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -25,15 +25,12 @@ use std::{
 use xshell::{cmd, Shell};
 
 fn main() -> anyhow::Result<()> {
+    let flags = flags::Xtask::from_env_or_exit();
+
     let sh = &Shell::new()?;
     sh.change_dir(project_root());
 
-    let flags = flags::Xtask::from_env()?;
     match flags.subcommand {
-        flags::XtaskCmd::Help(_) => {
-            println!("{}", flags::Xtask::HELP);
-            Ok(())
-        }
         flags::XtaskCmd::Install(cmd) => cmd.run(sh),
         flags::XtaskCmd::FuzzTests(_) => run_fuzzer(sh),
         flags::XtaskCmd::Release(cmd) => cmd.run(sh),