Merge commit '55d9a533b309119c8acd13061581b43ae8840823' into sync-from-ra

This commit is contained in:
Laurențiu Nicola 2024-04-20 19:17:08 +03:00
commit c8c635a3f8
237 changed files with 6273 additions and 3337 deletions

View file

@ -22,6 +22,8 @@ Otherwise please try to provide information which will help us to fix the issue
**rustc version**: (eg. output of `rustc -V`)
**editor or extension**: (eg. VSCode, Vim, Emacs, etc. For VSCode users, specify your extension version; for users of other editors, provide the distribution if applicable)
**relevant settings**: (eg. client settings, or environment variables like `CARGO`, `RUSTC`, `RUSTUP_HOME` or `CARGO_HOME`)
**repository link (if public, optional)**: (eg. [rust-analyzer](https://github.com/rust-lang/rust-analyzer))

View file

@ -91,7 +91,7 @@ jobs:
run: cargo build --quiet ${{ env.USE_SYSROOT_ABI }}
- name: Test
if: matrix.os == 'ubuntu-latest' || github.event_name == 'push'
if: matrix.os == 'ubuntu-latest' || matrix.os == 'windows-latest' || github.event_name == 'push'
run: cargo test ${{ env.USE_SYSROOT_ABI }} -- --nocapture --quiet
- name: Switch to stable toolchain

View file

@ -58,7 +58,7 @@ jobs:
key: ${{ runner.os }}-target-${{ github.sha }}
- name: Upload build metrics
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: build-${{ github.sha }}
path: target/build.json
@ -95,7 +95,7 @@ jobs:
run: cargo xtask metrics "${{ matrix.names }}"
- name: Upload metrics
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.names }}-${{ github.sha }}
path: target/${{ matrix.names }}.json
@ -109,32 +109,32 @@ jobs:
uses: actions/checkout@v4
- name: Download build metrics
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: build-${{ github.sha }}
- name: Download self metrics
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: self-${{ github.sha }}
- name: Download ripgrep-13.0.0 metrics
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: ripgrep-13.0.0-${{ github.sha }}
- name: Download webrender-2022 metrics
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: webrender-2022-${{ github.sha }}
- name: Download diesel-1.4.8 metrics
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: diesel-1.4.8-${{ github.sha }}
- name: Download hyper-0.14.18 metrics
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: hyper-0.14.18-${{ github.sha }}

30
CONTRIBUTING.md Normal file
View file

@ -0,0 +1,30 @@
# Contributing to rust-analyzer
Thank you for your interest in contributing to rust-analyzer! There are many ways to contribute
and we appreciate all of them.
To get a quick overview of the crates and structure of the project take a look at the
[./docs/dev](./docs/dev) folder.
If you have any questions, please ask them in the [rust-analyzer zulip stream](
https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer). If you are unsure
where to start when working on a concrete issue, drop a comment on the related issue to ask for
mentoring instructions (general discussions should preferably happen on zulip, though).
## Fixing a bug or improving a feature
Generally it's fine to just work on these kinds of things and open a pull request for them. If there
is an accompanying issue, make sure to link it in the pull request description so it can be closed
afterwards or linked for context.
If you are looking for something to fix or work on, keep an eye out for the `C-bug` and
`C-enhancement` labels.
## Implementing a new feature
It's advised to first open an issue for any kind of new feature so the team can tell upfront whether
the feature is desirable or not before any implementation work happens. We want to minimize the
possibility of someone putting a lot of work into a feature that is then going to waste as we deem
it out of scope (be it due to generally not fitting in with rust-analyzer, or just not having the
maintenance capacity). If there already is a feature issue open but it is not clear whether it is
considered accepted, feel free to just drop a comment and ask!

75
Cargo.lock generated
View file

@ -161,9 +161,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chalk-derive"
version = "0.96.0"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5676cea088c32290fe65c82895be9d06dd21e0fa49bb97ca840529e9417ab71a"
checksum = "92a0aedc4ac2adc5c0b7dc9ec38c5c816284ad28da6d4ecd01873b9683f54972"
dependencies = [
"proc-macro2",
"quote",
@ -173,20 +173,19 @@ dependencies = [
[[package]]
name = "chalk-ir"
version = "0.96.0"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff550c2cdd63ff74394214dce03d06386928a641c0f08837535f04af573a966d"
checksum = "db18493569b190f7266a04901e520fc3a5c00564475154287906f8a27302c119"
dependencies = [
"bitflags 2.4.2",
"chalk-derive",
"lazy_static",
]
[[package]]
name = "chalk-recursive"
version = "0.96.0"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c4559e5c9b200240453b07d893f9c3c74413b53b0d33cbe272c68b0b77aa1c3"
checksum = "ae4ba8ce5bd2e1b59f1f79495bc8704db09a8285e51cc5ddf01d9baee1bf447d"
dependencies = [
"chalk-derive",
"chalk-ir",
@ -197,9 +196,9 @@ dependencies = [
[[package]]
name = "chalk-solve"
version = "0.96.0"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0882e68ce9eb5a0a2413806538494d19df6ee520ab17d1faf489e952f32e98b8"
checksum = "b2ec1b3b7f7b1ec38f099ef39c2bc3ea29335be1b8316d114baff46d96d131e9"
dependencies = [
"chalk-derive",
"chalk-ir",
@ -552,6 +551,7 @@ dependencies = [
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"mbe",
"parser",
"rustc-hash",
"smallvec",
"span",
@ -695,6 +695,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
"bitflags 2.4.2",
"cov-mark",
"crossbeam-channel",
"either",
@ -781,6 +782,7 @@ checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4"
dependencies = [
"equivalent",
"hashbrown",
"serde",
]
[[package]]
@ -1594,6 +1596,7 @@ dependencies = [
"ide",
"ide-db",
"ide-ssr",
"indexmap",
"itertools",
"load-cargo",
"lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1622,6 +1625,7 @@ dependencies = [
"test-fixture",
"test-utils",
"tikv-jemallocator",
"toml",
"toolchain",
"tracing",
"tracing-subscriber",
@ -1775,6 +1779,15 @@ dependencies = [
"syn",
]
[[package]]
name = "serde_spanned"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1"
dependencies = [
"serde",
]
[[package]]
name = "sharded-slab"
version = "0.1.7"
@ -1822,6 +1835,7 @@ dependencies = [
"salsa",
"stdx",
"syntax",
"text-size",
"vfs",
]
@ -2025,6 +2039,40 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "toml"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml_datetime"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03"
dependencies = [
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
"winnow",
]
[[package]]
name = "toolchain"
version = "0.0.0"
@ -2401,6 +2449,15 @@ version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
[[package]]
name = "winnow"
version = "0.5.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8434aeec7b290e8da5c3f0d628cb0eac6cabcb31d14bb74f779a08109a5914d6"
dependencies = [
"memchr",
]
[[package]]
name = "write-json"
version = "0.1.4"

View file

@ -22,6 +22,7 @@ smol_str.opt-level = 3
text-size.opt-level = 3
# This speeds up `cargo xtask dist`.
miniz_oxide.opt-level = 3
salsa.opt-level = 3
[profile.release]
incremental = true
@ -106,10 +107,10 @@ arrayvec = "0.7.4"
bitflags = "2.4.1"
cargo_metadata = "0.18.1"
camino = "1.1.6"
chalk-solve = { version = "0.96.0", default-features = false }
chalk-ir = "0.96.0"
chalk-recursive = { version = "0.96.0", default-features = false }
chalk-derive = "0.96.0"
chalk-solve = { version = "0.97.0", default-features = false }
chalk-ir = "0.97.0"
chalk-recursive = { version = "0.97.0", default-features = false }
chalk-derive = "0.97.0"
command-group = "2.0.1"
crossbeam-channel = "0.5.8"
dissimilar = "1.0.7"
@ -188,6 +189,8 @@ enum_variant_names = "allow"
new_ret_no_self = "allow"
# Has a bunch of false positives
useless_asref = "allow"
# Has false positives
assigning_clones = "allow"
## Following lints should be tackled at some point
too_many_arguments = "allow"

View file

@ -13,8 +13,9 @@ https://rust-analyzer.github.io/manual.html#installation
## Documentation
If you want to **contribute** to rust-analyzer or are just curious about how
things work under the hood, check the [./docs/dev](./docs/dev) folder.
If you want to **contribute** to rust-analyzer check out the [CONTRIBUTING.md](./CONTRIBUTING.md) or
if you are just curious about how things work under the hood, check the [./docs/dev](./docs/dev)
folder.
If you want to **use** rust-analyzer's language server with your editor of
choice, check [the manual](https://rust-analyzer.github.io/manual.html).

View file

@ -19,6 +19,10 @@ use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been build yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
/// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a
/// Rust crate. Source roots *might* be nested: in this case, a file belongs to
@ -26,9 +30,6 @@ pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf)
/// source root, and the analyzer does not know the root path of the source root at
/// all. So, a file from one source root can't refer to a file in another source
/// root by path.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SourceRoot {
/// Sysroot or crates.io library.
@ -285,20 +286,39 @@ pub struct CrateData {
/// For purposes of analysis, crates are anonymous (only names in
/// `Dependency` matters), this name should only be used for UI.
pub display_name: Option<CrateDisplayName>,
pub cfg_options: CfgOptions,
pub cfg_options: Arc<CfgOptions>,
/// The cfg options that could be used by the crate
pub potential_cfg_options: Option<CfgOptions>,
pub potential_cfg_options: Option<Arc<CfgOptions>>,
pub env: Env,
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
}
#[derive(Default, Debug, Clone, PartialEq, Eq)]
#[derive(Default, Clone, PartialEq, Eq)]
pub struct Env {
entries: FxHashMap<String, String>,
}
impl fmt::Debug for Env {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
struct EnvDebug<'s>(Vec<(&'s String, &'s String)>);
impl fmt::Debug for EnvDebug<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_map().entries(self.0.iter().copied()).finish()
}
}
f.debug_struct("Env")
.field("entries", &{
let mut entries: Vec<_> = self.entries.iter().collect();
entries.sort();
EnvDebug(entries)
})
.finish()
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Dependency {
pub crate_id: CrateId,
@ -328,12 +348,13 @@ impl CrateGraph {
edition: Edition,
display_name: Option<CrateDisplayName>,
version: Option<String>,
cfg_options: CfgOptions,
potential_cfg_options: Option<CfgOptions>,
env: Env,
cfg_options: Arc<CfgOptions>,
potential_cfg_options: Option<Arc<CfgOptions>>,
mut env: Env,
is_proc_macro: bool,
origin: CrateOrigin,
) -> CrateId {
env.entries.shrink_to_fit();
let data = CrateData {
root_file_id,
edition,
@ -650,16 +671,24 @@ impl FromIterator<(String, String)> for Env {
}
impl Env {
pub fn set(&mut self, env: &str, value: String) {
self.entries.insert(env.to_owned(), value);
pub fn set(&mut self, env: &str, value: impl Into<String>) {
self.entries.insert(env.to_owned(), value.into());
}
pub fn get(&self, env: &str) -> Option<String> {
self.entries.get(env).cloned()
}
pub fn iter(&self) -> impl Iterator<Item = (&str, &str)> {
self.entries.iter().map(|(k, v)| (k.as_str(), v.as_str()))
pub fn extend_from_other(&mut self, other: &Env) {
self.entries.extend(other.entries.iter().map(|(x, y)| (x.to_owned(), y.to_owned())));
}
}
impl From<Env> for Vec<(String, String)> {
fn from(env: Env) -> Vec<(String, String)> {
let mut entries: Vec<_> = env.entries.into_iter().collect();
entries.sort();
entries
}
}

View file

@ -45,7 +45,7 @@ pub trait Upcast<T: ?Sized> {
pub const DEFAULT_FILE_TEXT_LRU_CAP: usize = 16;
pub const DEFAULT_PARSE_LRU_CAP: usize = 128;
pub const DEFAULT_BORROWCK_LRU_CAP: usize = 1024;
pub const DEFAULT_BORROWCK_LRU_CAP: usize = 2024;
pub trait FileLoader {
/// Text of the file.
@ -83,7 +83,8 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseC
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = tracing::span!(tracing::Level::INFO, "parse_query", ?file_id).entered();
let text = db.file_text(file_id);
SourceFile::parse(&text)
// FIXME: Edition based parsing
SourceFile::parse(&text, span::Edition::CURRENT)
}
/// We don't want to give HIR knowledge of source roots, hence we extract these

View file

@ -58,13 +58,6 @@ impl CfgOptions {
self.enabled.insert(CfgAtom::KeyValue { key, value });
}
pub fn difference<'a>(
&'a self,
other: &'a CfgOptions,
) -> impl Iterator<Item = &'a CfgAtom> + 'a {
self.enabled.difference(&other.enabled)
}
pub fn apply_diff(&mut self, diff: CfgDiff) {
for atom in diff.enable {
self.enabled.insert(atom);

View file

@ -1,12 +1,12 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect};
use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
use syntax::{ast, AstNode};
use syntax::{ast, AstNode, Edition};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
fn assert_parse_result(input: &str, expected: CfgExpr) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt);
@ -14,7 +14,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
}
fn check_dnf(input: &str, expect: Expect) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt);
@ -23,7 +23,7 @@ fn check_dnf(input: &str, expect: Expect) {
}
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt);
@ -34,7 +34,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt);

View file

@ -4,12 +4,13 @@
use std::{
ffi::OsString,
fmt, io,
marker::PhantomData,
path::PathBuf,
process::{ChildStderr, ChildStdout, Command, Stdio},
};
use command_group::{CommandGroup, GroupChild};
use crossbeam_channel::{unbounded, Receiver, Sender};
use crossbeam_channel::Sender;
use stdx::process::streaming_output;
/// Cargo output is structured as a one JSON per line. This trait abstracts parsing one line of
@ -99,10 +100,10 @@ pub(crate) struct CommandHandle<T> {
/// a read syscall dropping and therefore terminating the process is our best option.
child: JodGroupChild,
thread: stdx::thread::JoinHandle<io::Result<(bool, String)>>,
pub(crate) receiver: Receiver<T>,
program: OsString,
arguments: Vec<OsString>,
current_dir: Option<PathBuf>,
_phantom: PhantomData<T>,
}
impl<T> fmt::Debug for CommandHandle<T> {
@ -116,7 +117,7 @@ impl<T> fmt::Debug for CommandHandle<T> {
}
impl<T: ParseFromLine> CommandHandle<T> {
pub(crate) fn spawn(mut command: Command) -> std::io::Result<Self> {
pub(crate) fn spawn(mut command: Command, sender: Sender<T>) -> std::io::Result<Self> {
command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
let mut child = command.group_spawn().map(JodGroupChild)?;
@ -127,13 +128,12 @@ impl<T: ParseFromLine> CommandHandle<T> {
let stdout = child.0.inner().stdout.take().unwrap();
let stderr = child.0.inner().stderr.take().unwrap();
let (sender, receiver) = unbounded();
let actor = CargoActor::<T>::new(sender, stdout, stderr);
let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
.name("CommandHandle".to_owned())
.spawn(move || actor.run())
.expect("failed to spawn thread");
Ok(CommandHandle { program, arguments, current_dir, child, thread, receiver })
Ok(CommandHandle { program, arguments, current_dir, child, thread, _phantom: PhantomData })
}
pub(crate) fn cancel(mut self) {

View file

@ -41,19 +41,50 @@ pub enum InvocationLocation {
Workspace,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CargoOptions {
pub target_triples: Vec<String>,
pub all_targets: bool,
pub no_default_features: bool,
pub all_features: bool,
pub features: Vec<String>,
pub extra_args: Vec<String>,
pub extra_env: FxHashMap<String, String>,
pub target_dir: Option<Utf8PathBuf>,
}
impl CargoOptions {
fn apply_on_command(&self, cmd: &mut Command) {
for target in &self.target_triples {
cmd.args(["--target", target.as_str()]);
}
if self.all_targets {
cmd.arg("--all-targets");
}
if self.all_features {
cmd.arg("--all-features");
} else {
if self.no_default_features {
cmd.arg("--no-default-features");
}
if !self.features.is_empty() {
cmd.arg("--features");
cmd.arg(self.features.join(" "));
}
}
if let Some(target_dir) = &self.target_dir {
cmd.arg("--target-dir").arg(target_dir);
}
cmd.envs(&self.extra_env);
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FlycheckConfig {
CargoCommand {
command: String,
target_triples: Vec<String>,
all_targets: bool,
no_default_features: bool,
all_features: bool,
features: Vec<String>,
extra_args: Vec<String>,
extra_env: FxHashMap<String, String>,
options: CargoOptions,
ansi_color_output: bool,
target_dir: Option<Utf8PathBuf>,
},
CustomCommand {
command: String,
@ -184,6 +215,8 @@ struct FlycheckActor {
/// have to wrap sub-processes output handling in a thread and pass messages
/// back over a channel.
command_handle: Option<CommandHandle<CargoCheckMessage>>,
/// The receiver side of the channel mentioned above.
command_receiver: Option<Receiver<CargoCheckMessage>>,
}
enum Event {
@ -209,6 +242,7 @@ impl FlycheckActor {
sysroot_root,
root: workspace_root,
command_handle: None,
command_receiver: None,
}
}
@ -217,14 +251,13 @@ impl FlycheckActor {
}
fn next_event(&self, inbox: &Receiver<StateChange>) -> Option<Event> {
let check_chan = self.command_handle.as_ref().map(|cargo| &cargo.receiver);
if let Ok(msg) = inbox.try_recv() {
// give restarts a preference so check outputs don't block a restart or stop
return Some(Event::RequestStateChange(msg));
}
select! {
recv(inbox) -> msg => msg.ok().map(Event::RequestStateChange),
recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
recv(self.command_receiver.as_ref().unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
}
}
@ -253,10 +286,12 @@ impl FlycheckActor {
let formatted_command = format!("{:?}", command);
tracing::debug!(?command, "will restart flycheck");
match CommandHandle::spawn(command) {
let (sender, receiver) = unbounded();
match CommandHandle::spawn(command, sender) {
Ok(command_handle) => {
tracing::debug!(command = formatted_command, "did restart flycheck");
tracing::debug!(command = formatted_command, "did restart flycheck");
self.command_handle = Some(command_handle);
self.command_receiver = Some(receiver);
self.report_progress(Progress::DidStart);
}
Err(error) => {
@ -272,13 +307,15 @@ impl FlycheckActor {
// Watcher finished
let command_handle = self.command_handle.take().unwrap();
self.command_receiver.take();
let formatted_handle = format!("{:?}", command_handle);
let res = command_handle.join();
if res.is_err() {
if let Err(error) = &res {
tracing::error!(
"Flycheck failed to run the following command: {}",
formatted_handle
"Flycheck failed to run the following command: {}, error={}",
formatted_handle,
error
);
}
self.report_progress(Progress::DidFinish(res));
@ -332,18 +369,7 @@ impl FlycheckActor {
saved_file: Option<&AbsPath>,
) -> Option<Command> {
let (mut cmd, args) = match &self.config {
FlycheckConfig::CargoCommand {
command,
target_triples,
no_default_features,
all_targets,
all_features,
extra_args,
features,
extra_env,
ansi_color_output,
target_dir,
} => {
FlycheckConfig::CargoCommand { command, options, ansi_color_output } => {
let mut cmd = Command::new(Tool::Cargo.path());
if let Some(sysroot_root) = &self.sysroot_root {
cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root));
@ -365,28 +391,8 @@ impl FlycheckActor {
cmd.arg("--manifest-path");
cmd.arg(self.root.join("Cargo.toml"));
for target in target_triples {
cmd.args(["--target", target.as_str()]);
}
if *all_targets {
cmd.arg("--all-targets");
}
if *all_features {
cmd.arg("--all-features");
} else {
if *no_default_features {
cmd.arg("--no-default-features");
}
if !features.is_empty() {
cmd.arg("--features");
cmd.arg(features.join(" "));
}
}
if let Some(target_dir) = target_dir {
cmd.arg("--target-dir").arg(target_dir);
}
cmd.envs(extra_env);
(cmd, extra_args.clone())
options.apply_on_command(&mut cmd);
(cmd, options.extra_args.clone())
}
FlycheckConfig::CustomCommand {
command,

View file

@ -3,11 +3,15 @@
use std::process::Command;
use crossbeam_channel::Receiver;
use crossbeam_channel::Sender;
use paths::AbsPath;
use serde::Deserialize;
use toolchain::Tool;
use crate::command::{CommandHandle, ParseFromLine};
use crate::{
command::{CommandHandle, ParseFromLine},
CargoOptions,
};
#[derive(Debug, Deserialize)]
#[serde(tag = "event", rename_all = "camelCase")]
@ -51,30 +55,34 @@ impl ParseFromLine for CargoTestMessage {
#[derive(Debug)]
pub struct CargoTestHandle {
handle: CommandHandle<CargoTestMessage>,
_handle: CommandHandle<CargoTestMessage>,
}
// Example of a cargo test command:
// cargo test --workspace --no-fail-fast -- module::func -Z unstable-options --format=json
impl CargoTestHandle {
pub fn new(path: Option<&str>) -> std::io::Result<Self> {
pub fn new(
path: Option<&str>,
options: CargoOptions,
root: &AbsPath,
sender: Sender<CargoTestMessage>,
) -> std::io::Result<Self> {
let mut cmd = Command::new(Tool::Cargo.path());
cmd.env("RUSTC_BOOTSTRAP", "1");
cmd.arg("test");
cmd.arg("--workspace");
// --no-fail-fast is needed to ensure that all requested tests will run
cmd.arg("--no-fail-fast");
cmd.arg("--manifest-path");
cmd.arg(root.join("Cargo.toml"));
options.apply_on_command(&mut cmd);
cmd.arg("--");
if let Some(path) = path {
cmd.arg(path);
}
cmd.args(["-Z", "unstable-options"]);
cmd.arg("--format=json");
Ok(Self { handle: CommandHandle::spawn(cmd)? })
}
pub fn receiver(&self) -> &Receiver<CargoTestMessage> {
&self.handle.receiver
Ok(Self { _handle: CommandHandle::spawn(cmd, sender)? })
}
}

View file

@ -54,7 +54,7 @@ test-utils.workspace = true
test-fixture.workspace = true
[features]
in-rust-tree = []
in-rust-tree = ["hir-expand/in-rust-tree"]
[lints]
workspace = true

View file

@ -5,7 +5,7 @@ pub mod builtin;
#[cfg(test)]
mod tests;
use std::{hash::Hash, ops, slice::Iter as SliceIter};
use std::{borrow::Cow, hash::Hash, ops, slice::Iter as SliceIter};
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
@ -141,6 +141,10 @@ impl Attrs {
}
}
pub fn cfgs(&self) -> impl Iterator<Item = CfgExpr> + '_ {
self.by_key("cfg").tt_values().map(CfgExpr::parse)
}
pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool {
match self.cfg() {
None => true,
@ -569,6 +573,10 @@ impl<'attr> AttrQuery<'attr> {
self.attrs().find_map(|attr| attr.string_value())
}
pub fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
self.attrs().find_map(|attr| attr.string_value_unescape())
}
pub fn exists(self) -> bool {
self.attrs().next().is_some()
}

View file

@ -11,7 +11,7 @@ use syntax::{ast, AstNode, TextRange};
use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let tt = syntax_node_to_token_tree(

View file

@ -510,6 +510,7 @@ pub struct ConstData {
pub type_ref: Interned<TypeRef>,
pub visibility: RawVisibility,
pub rustc_allow_incoherent_impl: bool,
pub has_body: bool,
}
impl ConstData {
@ -533,6 +534,7 @@ impl ConstData {
type_ref: konst.type_ref.clone(),
visibility,
rustc_allow_incoherent_impl,
has_body: konst.has_body,
})
}
}
@ -737,7 +739,7 @@ impl<'a> AssocItemCollector<'a> {
&AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
ctxt,
expand_to,
self.expander.module.krate(),
self.expander.krate(),
resolver,
) {
Ok(Some(call_id)) => {

View file

@ -26,7 +26,7 @@ use crate::{
tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree},
type_ref::TypeRef,
visibility::RawVisibility,
EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId,
EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId, VariantId,
};
/// Note that we use `StructData` for unions as well!
@ -191,8 +191,6 @@ impl StructData {
let krate = loc.container.krate;
let item_tree = loc.id.item_tree(db);
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
let cfg_options = db.crate_graph()[krate].cfg_options.clone();
let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
let mut flags = StructFlags::NO_FLAGS;
@ -219,7 +217,7 @@ impl StructData {
loc.id.file_id(),
loc.container.local_id,
&item_tree,
&cfg_options,
&db.crate_graph()[krate].cfg_options,
&strukt.fields,
None,
);
@ -248,8 +246,6 @@ impl StructData {
let krate = loc.container.krate;
let item_tree = loc.id.item_tree(db);
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
let cfg_options = db.crate_graph()[krate].cfg_options.clone();
let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
let mut flags = StructFlags::NO_FLAGS;
if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() {
@ -266,7 +262,7 @@ impl StructData {
loc.id.file_id(),
loc.container.local_id,
&item_tree,
&cfg_options,
&db.crate_graph()[krate].cfg_options,
&union.fields,
None,
);
@ -338,7 +334,6 @@ impl EnumVariantData {
let container = loc.parent.lookup(db).container;
let krate = container.krate;
let item_tree = loc.id.item_tree(db);
let cfg_options = db.crate_graph()[krate].cfg_options.clone();
let variant = &item_tree[loc.id.value];
let (var_data, diagnostics) = lower_fields(
@ -347,7 +342,7 @@ impl EnumVariantData {
loc.id.file_id(),
container.local_id,
&item_tree,
&cfg_options,
&db.crate_graph()[krate].cfg_options,
&variant.fields,
Some(item_tree[loc.parent.lookup(db).id.value].visibility),
);
@ -383,6 +378,15 @@ impl VariantData {
VariantData::Unit => StructKind::Unit,
}
}
#[allow(clippy::self_named_constructors)]
pub(crate) fn variant_data(db: &dyn DefDatabase, id: VariantId) -> Arc<VariantData> {
match id {
VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
VariantId::EnumVariantId(it) => db.enum_variant_data(it).variant_data.clone(),
VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
}
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]

View file

@ -12,7 +12,7 @@ use crate::{
attr::{Attrs, AttrsWithOwner},
body::{scope::ExprScopes, Body, BodySourceMap},
data::{
adt::{EnumData, EnumVariantData, StructData},
adt::{EnumData, EnumVariantData, StructData, VariantData},
ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData,
ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData,
},
@ -127,6 +127,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
id: EnumVariantId,
) -> (Arc<EnumVariantData>, DefDiagnostics);
#[salsa::transparent]
#[salsa::invoke(VariantData::variant_data)]
fn variant_data(&self, id: VariantId) -> Arc<VariantData>;
#[salsa::transparent]
#[salsa::invoke(ImplData::impl_data_query)]
fn impl_data(&self, e: ImplId) -> Arc<ImplData>;

View file

@ -11,6 +11,7 @@ use hir_expand::{
};
use limit::Limit;
use syntax::{ast, Parse};
use triomphe::Arc;
use crate::{
attr::Attrs, db::DefDatabase, lower::LowerCtx, path::Path, AsMacroCall, MacroId, ModuleId,
@ -19,9 +20,8 @@ use crate::{
#[derive(Debug)]
pub struct Expander {
cfg_options: CfgOptions,
cfg_options: Arc<CfgOptions>,
span_map: OnceCell<SpanMap>,
krate: CrateId,
current_file_id: HirFileId,
pub(crate) module: ModuleId,
/// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
@ -45,10 +45,13 @@ impl Expander {
recursion_limit,
cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
span_map: OnceCell::new(),
krate: module.krate,
}
}
pub fn krate(&self) -> CrateId {
self.module.krate
}
pub fn enter_expand<T: ast::AstNode>(
&mut self,
db: &dyn DefDatabase,
@ -112,7 +115,7 @@ impl Expander {
pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
Attrs::filter(
db,
self.krate,
self.krate(),
RawAttrs::new(
db.upcast(),
owner,

View file

@ -30,6 +30,8 @@ pub fn find_path(
find_path_inner(FindPathCtx { db, prefixed: None, prefer_no_std, prefer_prelude }, item, from)
}
/// Find a path that can be used to refer to a certain item. This can depend on
/// *from where* you're referring to the item, hence the `from` parameter.
pub fn find_path_prefixed(
db: &dyn DefDatabase,
item: ItemInNs,
@ -255,7 +257,7 @@ fn find_in_scope(
item: ItemInNs,
) -> Option<Name> {
def_map.with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| {
def_map[local_id].scope.name_of(item).map(|(name, _, _)| name.clone())
def_map[local_id].scope.names_of(item, |name, _, _| Some(name.clone()))
})
}
@ -608,7 +610,8 @@ mod tests {
) {
let (db, pos) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(pos);
let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
let parsed_path_file =
syntax::SourceFile::parse(&format!("use {path};"), span::Edition::CURRENT);
let ast_path =
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
let mod_path = ModPath::from_src(&db, ast_path, &mut |range| {

View file

@ -3,13 +3,15 @@
//! generic parameters. See also the `Generics` type and the `generics_of` query
//! in rustc.
use std::ops;
use either::Either;
use hir_expand::{
name::{AsName, Name},
ExpandResult,
};
use intern::Interned;
use la_arena::{Arena, Idx};
use la_arena::Arena;
use once_cell::unsync::Lazy;
use stdx::impl_from;
use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
@ -23,12 +25,14 @@ use crate::{
nameres::{DefMap, MacroSubNs},
type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef},
AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LifetimeParamId,
LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
};
/// Data about a generic type parameter (to a function, struct, impl, ...).
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TypeParamData {
/// [`None`] only if the type ref is an [`TypeRef::ImplTrait`]. FIXME: Might be better to just
/// make it always be a value, giving impl trait a special name.
pub name: Option<Name>,
pub default: Option<Interned<TypeRef>>,
pub provenance: TypeParamProvenance,
@ -156,6 +160,20 @@ pub struct GenericParams {
pub where_predicates: Box<[WherePredicate]>,
}
impl ops::Index<LocalTypeOrConstParamId> for GenericParams {
type Output = TypeOrConstParamData;
fn index(&self, index: LocalTypeOrConstParamId) -> &TypeOrConstParamData {
&self.type_or_consts[index]
}
}
impl ops::Index<LocalLifetimeParamId> for GenericParams {
type Output = LifetimeParamData;
fn index(&self, index: LocalLifetimeParamId) -> &LifetimeParamData {
&self.lifetimes[index]
}
}
/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
/// It might still result in multiple actual predicates though, because of
@ -197,7 +215,7 @@ impl GenericParamsCollector {
lower_ctx: &LowerCtx<'_>,
node: &dyn HasGenericParams,
add_param_attrs: impl FnMut(
Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
ast::GenericParam,
),
) {
@ -225,7 +243,7 @@ impl GenericParamsCollector {
lower_ctx: &LowerCtx<'_>,
params: ast::GenericParamList,
mut add_param_attrs: impl FnMut(
Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
ast::GenericParam,
),
) {
@ -414,16 +432,16 @@ impl GenericParams {
}
/// Iterator of type_or_consts field
pub fn iter(
pub fn iter_type_or_consts(
&self,
) -> impl DoubleEndedIterator<Item = (Idx<TypeOrConstParamData>, &TypeOrConstParamData)> {
) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
self.type_or_consts.iter()
}
/// Iterator of lifetimes field
pub fn iter_lt(
&self,
) -> impl DoubleEndedIterator<Item = (Idx<LifetimeParamData>, &LifetimeParamData)> {
) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> {
self.lifetimes.iter()
}

View file

@ -277,13 +277,43 @@ impl ItemScope {
ItemInNs::Types(def) => self.types.iter().find_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
}),
ItemInNs::Values(def) => self.values.iter().find_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
}),
}
}
/// XXX: this is O(N) rather than O(1), try to not introduce new usages.
pub(crate) fn names_of<T>(
&self,
item: ItemInNs,
mut cb: impl FnMut(&Name, Visibility, bool) -> Option<T>,
) -> Option<T> {
match item {
ItemInNs::Macros(def) => self
.macros
.iter()
.filter_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
})
.find_map(|(a, b, c)| cb(a, b, c)),
ItemInNs::Types(def) => self
.types
.iter()
.filter_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
})
.find_map(|(a, b, c)| cb(a, b, c)),
ItemInNs::Values(def) => self
.values
.iter()
.filter_map(|(name, &(other_def, vis, i))| {
(other_def == def).then_some((name, vis, i.is_none()))
})
.find_map(|(a, b, c)| cb(a, b, c)),
}
}
pub(crate) fn traits(&self) -> impl Iterator<Item = TraitId> + '_ {
self.types
.values()

View file

@ -716,6 +716,7 @@ pub struct Const {
pub visibility: RawVisibilityId,
pub type_ref: Interned<TypeRef>,
pub ast_id: FileAstId<ast::Const>,
pub has_body: bool,
}
#[derive(Debug, Clone, Eq, PartialEq)]

View file

@ -446,7 +446,7 @@ impl<'a> Ctx<'a> {
let type_ref = self.lower_type_ref_opt(konst.ty());
let visibility = self.lower_visibility(konst);
let ast_id = self.source_ast_id_map.ast_id(konst);
let res = Const { name, visibility, type_ref, ast_id };
let res = Const { name, visibility, type_ref, ast_id, has_body: konst.body().is_some() };
id(self.data().consts.alloc(res))
}

View file

@ -357,7 +357,7 @@ impl Printer<'_> {
wln!(self, "}}");
}
ModItem::Const(it) => {
let Const { name, visibility, type_ref, ast_id } = &self.tree[it];
let Const { name, visibility, type_ref, ast_id, has_body: _ } = &self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "const ");

View file

@ -422,6 +422,10 @@ impl ModuleId {
}
}
pub fn crate_def_map(self, db: &dyn DefDatabase) -> Arc<DefMap> {
db.crate_def_map(self.krate)
}
pub fn krate(self) -> CrateId {
self.krate
}
@ -438,6 +442,8 @@ impl ModuleId {
})
}
/// Returns the module containing `self`, either the parent `mod`, or the module (or block) containing
/// the block, if `self` corresponds to a block expression.
pub fn containing_module(self, db: &dyn DefDatabase) -> Option<ModuleId> {
self.def_map(db).containing_module(self.local_id)
}
@ -929,6 +935,18 @@ impl GenericDefId {
GenericDefId::EnumVariantId(_) => (FileId::BOGUS.into(), None),
}
}
pub fn assoc_trait_container(self, db: &dyn DefDatabase) -> Option<TraitId> {
match match self {
GenericDefId::FunctionId(f) => f.lookup(db).container,
GenericDefId::TypeAliasId(t) => t.lookup(db).container,
GenericDefId::ConstId(c) => c.lookup(db).container,
_ => return None,
} {
ItemContainerId::TraitId(trait_) => Some(trait_),
_ => None,
}
}
}
impl From<AssocItemId> for GenericDefId {

View file

@ -610,6 +610,10 @@ struct Foo {
field1: i32,
#[cfg(never)]
field2: (),
#[cfg(feature = "never")]
field3: (),
#[cfg(not(feature = "never"))]
field4: (),
}
#[derive(Default)]
enum Bar {
@ -618,12 +622,16 @@ enum Bar {
Bar,
}
"#,
expect![[r#"
expect![[r##"
#[derive(Default)]
struct Foo {
field1: i32,
#[cfg(never)]
field2: (),
#[cfg(feature = "never")]
field3: (),
#[cfg(not(feature = "never"))]
field4: (),
}
#[derive(Default)]
enum Bar {
@ -635,7 +643,7 @@ enum Bar {
impl < > $crate::default::Default for Foo< > where {
fn default() -> Self {
Foo {
field1: $crate::default::Default::default(),
field1: $crate::default::Default::default(), field4: $crate::default::Default::default(),
}
}
}
@ -643,6 +651,6 @@ impl < > $crate::default::Default for Bar< > where {
fn default() -> Self {
Bar::Bar
}
}"#]],
}"##]],
);
}

View file

@ -316,8 +316,11 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
_: Span,
_: Span,
) -> Result<Subtree, ProcMacroExpansionError> {
let (parse, _) =
::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
let (parse, _) = ::mbe::token_tree_to_syntax_node(
subtree,
::mbe::TopEntryPoint::MacroItems,
span::Edition::CURRENT,
);
if parse.errors().is_empty() {
Ok(subtree.clone())
} else {

View file

@ -534,8 +534,7 @@ impl DefCollector<'_> {
Edition::Edition2015 => name![rust_2015],
Edition::Edition2018 => name![rust_2018],
Edition::Edition2021 => name![rust_2021],
// FIXME: update this when rust_2024 exists
Edition::Edition2024 => name![rust_2021],
Edition::Edition2024 => name![rust_2024],
};
let path_kind = match self.def_map.data.edition {
@ -1918,7 +1917,7 @@ impl ModCollector<'_, '_> {
}
fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) {
let path_attr = attrs.by_key("path").string_value();
let path_attr = attrs.by_key("path").string_value_unescape();
let is_macro_use = attrs.by_key("macro_use").exists();
let module = &self.item_tree[module_id];
match &module.kind {
@ -1932,7 +1931,8 @@ impl ModCollector<'_, '_> {
module_id,
);
let Some(mod_dir) = self.mod_dir.descend_into_definition(&module.name, path_attr)
let Some(mod_dir) =
self.mod_dir.descend_into_definition(&module.name, path_attr.as_deref())
else {
return;
};
@ -1953,8 +1953,12 @@ impl ModCollector<'_, '_> {
ModKind::Outline => {
let ast_id = AstId::new(self.file_id(), module.ast_id);
let db = self.def_collector.db;
match self.mod_dir.resolve_declaration(db, self.file_id(), &module.name, path_attr)
{
match self.mod_dir.resolve_declaration(
db,
self.file_id(),
&module.name,
path_attr.as_deref(),
) {
Ok((file_id, is_mod_rs, mod_dir)) => {
let item_tree = db.file_item_tree(file_id.into());
let krate = self.def_collector.def_map.krate;

View file

@ -32,9 +32,13 @@ tt.workspace = true
mbe.workspace = true
limit.workspace = true
span.workspace = true
parser.workspace = true
[dev-dependencies]
expect-test = "1.4.0"
[features]
in-rust-tree = ["syntax/in-rust-tree"]
[lints]
workspace = true

View file

@ -1,5 +1,5 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
use std::{fmt, ops};
use std::{borrow::Cow, fmt, ops};
use base_db::CrateId;
use cfg::CfgExpr;
@ -8,6 +8,7 @@ use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
use span::{Span, SyntaxContextId};
use syntax::unescape;
use syntax::{ast, format_smolstr, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::ThinArc;
@ -54,8 +55,7 @@ impl RawAttrs {
Attr {
id,
input: Some(Interned::new(AttrInput::Literal(tt::Literal {
// FIXME: Escape quotes from comment content
text: SmolStr::new(format_smolstr!("\"{doc}\"",)),
text: SmolStr::new(format_smolstr!("\"{}\"", Self::escape_chars(doc))),
span,
}))),
path: Interned::new(ModPath::from(crate::name!(doc))),
@ -74,6 +74,10 @@ impl RawAttrs {
RawAttrs { entries }
}
fn escape_chars(s: &str) -> String {
s.replace('\\', r#"\\"#).replace('"', r#"\""#)
}
pub fn from_attrs_owner(
db: &dyn ExpandDatabase,
owner: InFile<&dyn ast::HasAttrs>,
@ -297,6 +301,18 @@ impl Attr {
}
}
pub fn string_value_unescape(&self) -> Option<Cow<'_, str>> {
match self.input.as_deref()? {
AttrInput::Literal(it) => match it.text.strip_prefix('r') {
Some(it) => {
it.trim_matches('#').strip_prefix('"')?.strip_suffix('"').map(Cow::Borrowed)
}
None => it.text.strip_prefix('"')?.strip_suffix('"').and_then(unescape),
},
_ => None,
}
}
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
@ -346,6 +362,33 @@ impl Attr {
}
}
fn unescape(s: &str) -> Option<Cow<'_, str>> {
let mut buf = String::new();
let mut prev_end = 0;
let mut has_error = false;
unescape::unescape_unicode(s, unescape::Mode::Str, &mut |char_range, unescaped_char| match (
unescaped_char,
buf.capacity() == 0,
) {
(Ok(c), false) => buf.push(c),
(Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
prev_end = char_range.end
}
(Ok(c), true) => {
buf.reserve_exact(s.len());
buf.push_str(&s[..prev_end]);
buf.push(c);
}
(Err(_), _) => has_error = true,
});
match (has_error, buf.capacity() == 0) {
(true, _) => None,
(false, false) => Some(Cow::Owned(buf)),
(false, true) => Some(Cow::Borrowed(s)),
}
}
pub fn collect_attrs(
owner: &dyn ast::HasAttrs,
) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {

View file

@ -204,7 +204,11 @@ struct BasicAdtInfo {
}
fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
let (parsed, tm) = &mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
let (parsed, tm) = &mbe::token_tree_to_syntax_node(
tt,
mbe::TopEntryPoint::MacroItems,
parser::Edition::CURRENT,
);
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
.ok_or_else(|| ExpandError::other("invalid item definition"))?;
let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?;

View file

@ -219,7 +219,7 @@ fn assert_expand(
span: Span,
) -> ExpandResult<tt::Subtree> {
let call_site_span = span_with_call_site_ctxt(db, span, id);
let args = parse_exprs_with_sep(tt, ',', call_site_span);
let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT);
let dollar_crate = dollar_crate(span);
let expanded = match &*args {
[cond, panic_args @ ..] => {

View file

@ -1,57 +1,59 @@
//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
use std::iter::Peekable;
use base_db::CrateId;
use cfg::{CfgAtom, CfgExpr};
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, Attr, HasAttrs, Meta, VariantList},
AstNode, NodeOrToken, SyntaxElement, SyntaxNode, T,
AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
};
use tracing::{debug, warn};
use tt::SmolStr;
use crate::{db::ExpandDatabase, proc_macro::ProcMacroKind, MacroCallLoc, MacroDefKind};
fn check_cfg_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option<bool> {
fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option<bool> {
if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
return None;
}
debug!("Evaluating cfg {}", attr);
let cfg = parse_from_attr_meta(attr.meta()?)?;
debug!("Checking cfg {:?}", cfg);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false);
let enabled = db.crate_graph()[krate].cfg_options.check(&cfg) != Some(false);
Some(enabled)
}
fn check_cfg_attr_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option<bool> {
fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option<bool> {
if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
return None;
}
debug!("Evaluating cfg_attr {}", attr);
let cfg_expr = parse_from_attr_meta(attr.meta()?)?;
debug!("Checking cfg_attr {:?}", cfg_expr);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg_expr) != Some(false);
let enabled = db.crate_graph()[krate].cfg_options.check(&cfg_expr) != Some(false);
Some(enabled)
}
fn process_has_attrs_with_possible_comma<I: HasAttrs>(
items: impl Iterator<Item = I>,
loc: &MacroCallLoc,
db: &dyn ExpandDatabase,
items: impl Iterator<Item = I>,
krate: CrateId,
remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
for item in items {
let field_attrs = item.attrs();
'attrs: for attr in field_attrs {
if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() {
debug!("censoring type {:?}", item.syntax());
remove.insert(item.syntax().clone().into());
// We need to remove the , as well
remove_possible_comma(&item, remove);
break 'attrs;
if let Some(enabled) = check_cfg(db, &attr, krate) {
if enabled {
debug!("censoring {:?}", attr.syntax());
remove.insert(attr.syntax().clone().into());
} else {
debug!("censoring {:?}", item.syntax());
remove.insert(item.syntax().clone().into());
// We need to remove the , as well
remove_possible_comma(&item, remove);
break 'attrs;
}
}
if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
if enabled {
debug!("Removing cfg_attr tokens {:?}", attr);
let meta = attr.meta()?;
@ -60,13 +62,13 @@ fn process_has_attrs_with_possible_comma<I: HasAttrs>(
} else {
debug!("censoring type cfg_attr {:?}", item.syntax());
remove.insert(attr.syntax().clone().into());
continue;
}
}
}
}
Some(())
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum CfgExprStage {
/// Stripping the CFGExpr part of the attribute
@ -78,6 +80,7 @@ enum CfgExprStage {
// Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110
EverythingElse,
}
/// This function creates its own set of tokens to remove. To help prevent malformed syntax as input.
fn remove_tokens_within_cfg_attr(meta: Meta) -> Option<FxHashSet<SyntaxElement>> {
let mut remove: FxHashSet<SyntaxElement> = FxHashSet::default();
@ -131,23 +134,28 @@ fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet<SyntaxElement>
}
}
fn process_enum(
variants: VariantList,
loc: &MacroCallLoc,
db: &dyn ExpandDatabase,
variants: VariantList,
krate: CrateId,
remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
'variant: for variant in variants.variants() {
for attr in variant.attrs() {
if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() {
// Rustc does not strip the attribute if it is enabled. So we will leave it
debug!("censoring type {:?}", variant.syntax());
remove.insert(variant.syntax().clone().into());
// We need to remove the , as well
remove_possible_comma(&variant, remove);
continue 'variant;
};
if let Some(enabled) = check_cfg(db, &attr, krate) {
if enabled {
debug!("censoring {:?}", attr.syntax());
remove.insert(attr.syntax().clone().into());
} else {
// Rustc does not strip the attribute if it is enabled. So we will leave it
debug!("censoring type {:?}", variant.syntax());
remove.insert(variant.syntax().clone().into());
// We need to remove the , as well
remove_possible_comma(&variant, remove);
continue 'variant;
}
}
if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
if enabled {
debug!("Removing cfg_attr tokens {:?}", attr);
let meta = attr.meta()?;
@ -156,17 +164,16 @@ fn process_enum(
} else {
debug!("censoring type cfg_attr {:?}", variant.syntax());
remove.insert(attr.syntax().clone().into());
continue;
}
}
}
if let Some(fields) = variant.field_list() {
match fields {
ast::FieldList::RecordFieldList(fields) => {
process_has_attrs_with_possible_comma(fields.fields(), loc, db, remove)?;
process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
}
ast::FieldList::TupleFieldList(fields) => {
process_has_attrs_with_possible_comma(fields.fields(), loc, db, remove)?;
process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
}
}
}
@ -175,9 +182,9 @@ fn process_enum(
}
pub(crate) fn process_cfg_attrs(
db: &dyn ExpandDatabase,
node: &SyntaxNode,
loc: &MacroCallLoc,
db: &dyn ExpandDatabase,
) -> Option<FxHashSet<SyntaxElement>> {
// FIXME: #[cfg_eval] is not implemented. But it is not stable yet
let is_derive = match loc.def.kind {
@ -193,36 +200,35 @@ pub(crate) fn process_cfg_attrs(
let item = ast::Item::cast(node.clone())?;
for attr in item.attrs() {
if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
if let Some(enabled) = check_cfg_attr(db, &attr, loc.krate) {
if enabled {
debug!("Removing cfg_attr tokens {:?}", attr);
let meta = attr.meta()?;
let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
remove.extend(removes_from_cfg_attr);
} else {
debug!("censoring type cfg_attr {:?}", item.syntax());
debug!("Removing type cfg_attr {:?}", item.syntax());
remove.insert(attr.syntax().clone().into());
continue;
}
}
}
match item {
ast::Item::Struct(it) => match it.field_list()? {
ast::FieldList::RecordFieldList(fields) => {
process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut remove)?;
process_has_attrs_with_possible_comma(db, fields.fields(), loc.krate, &mut remove)?;
}
ast::FieldList::TupleFieldList(fields) => {
process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut remove)?;
process_has_attrs_with_possible_comma(db, fields.fields(), loc.krate, &mut remove)?;
}
},
ast::Item::Enum(it) => {
process_enum(it.variant_list()?, loc, db, &mut remove)?;
process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
}
ast::Item::Union(it) => {
process_has_attrs_with_possible_comma(
it.record_field_list()?.fields(),
loc,
db,
it.record_field_list()?.fields(),
loc.krate,
&mut remove,
)?;
}
@ -234,10 +240,22 @@ pub(crate) fn process_cfg_attrs(
/// Parses a `cfg` attribute from the meta
fn parse_from_attr_meta(meta: Meta) -> Option<CfgExpr> {
let tt = meta.token_tree()?;
let mut iter = tt.token_trees_and_tokens().skip(1).peekable();
let mut iter = tt
.token_trees_and_tokens()
.filter(is_not_whitespace)
.skip(1)
.take_while(is_not_closing_paren)
.peekable();
next_cfg_expr_from_syntax(&mut iter)
}
fn is_not_closing_paren(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
!matches!(element, NodeOrToken::Token(token) if (token.kind() == syntax::T![')']))
}
fn is_not_whitespace(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
!matches!(element, NodeOrToken::Token(token) if (token.kind() == SyntaxKind::WHITESPACE))
}
fn next_cfg_expr_from_syntax<I>(iter: &mut Peekable<I>) -> Option<CfgExpr>
where
I: Iterator<Item = NodeOrToken<ast::TokenTree, syntax::SyntaxToken>>,
@ -256,14 +274,13 @@ where
let Some(NodeOrToken::Node(tree)) = iter.next() else {
return Some(CfgExpr::Invalid);
};
let mut tree_iter = tree.token_trees_and_tokens().skip(1).peekable();
while tree_iter
.peek()
.filter(
|element| matches!(element, NodeOrToken::Token(token) if (token.kind() != syntax::T![')'])),
)
.is_some()
{
let mut tree_iter = tree
.token_trees_and_tokens()
.filter(is_not_whitespace)
.skip(1)
.take_while(is_not_closing_paren)
.peekable();
while tree_iter.peek().is_some() {
let pred = next_cfg_expr_from_syntax(&mut tree_iter);
if let Some(pred) = pred {
preds.push(pred);
@ -310,7 +327,7 @@ mod tests {
use crate::cfg_process::parse_from_attr_meta;
fn check_dnf_from_syntax(input: &str, expect: Expect) {
let parse = SourceFile::parse(input);
let parse = SourceFile::parse(input, span::Edition::CURRENT);
let node = match parse.tree().syntax().descendants().find_map(Attr::cast) {
Some(it) => it,
None => {

View file

@ -3,7 +3,7 @@
use base_db::{salsa, CrateId, FileId, SourceDatabase};
use either::Either;
use limit::Limit;
use mbe::syntax_node_to_token_tree;
use mbe::{syntax_node_to_token_tree, MatchedArmIndex};
use rustc_hash::FxHashSet;
use span::{AstIdMap, Span, SyntaxContextData, SyntaxContextId};
use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
@ -175,7 +175,7 @@ pub fn expand_speculative(
};
let censor_cfg =
cfg_process::process_cfg_attrs(speculative_args, &loc, db).unwrap_or_default();
cfg_process::process_cfg_attrs(db, speculative_args, &loc).unwrap_or_default();
let mut fixups = fixup::fixup_syntax(span_map, speculative_args, span);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
@ -225,43 +225,45 @@ pub fn expand_speculative(
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, _, ast) => {
let span = db.proc_macro_span(ast);
tt.delimiter = tt::Delimiter::invisible_spanned(span);
expander.expand(
db,
loc.def.krate,
loc.krate,
&tt,
attr_arg.as_ref(),
span_with_def_site_ctxt(db, span, actual_macro_call),
span_with_call_site_ctxt(db, span, actual_macro_call),
span_with_mixed_site_ctxt(db, span, actual_macro_call),
)
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate, span)
}
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInDerive(it, ..) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
};
let mut speculative_expansion =
match loc.def.kind {
MacroDefKind::ProcMacro(expander, _, ast) => {
let span = db.proc_macro_span(ast);
tt.delimiter = tt::Delimiter::invisible_spanned(span);
expander.expand(
db,
loc.def.krate,
loc.krate,
&tt,
attr_arg.as_ref(),
span_with_def_site_ctxt(db, span, actual_macro_call),
span_with_call_site_ctxt(db, span, actual_macro_call),
span_with_mixed_site_ctxt(db, span, actual_macro_call),
)
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
}
MacroDefKind::Declarative(it) => db
.decl_macro_expander(loc.krate, it)
.expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition),
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInDerive(it, ..) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
};
let expand_to = loc.expand_to();
fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let (node, rev_tmap) =
token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
let syntax_node = node.syntax_node();
let token = rev_tmap
@ -309,16 +311,20 @@ fn parse_macro_expansion(
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let edition = loc.def.edition;
let expand_to = loc.expand_to();
let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
let mbe::ValueResult { value: (tt, matched_arm), err } =
macro_expand(db, macro_file.macro_call_id, loc);
let (parse, rev_token_map) = token_tree_to_syntax_node(
let (parse, mut rev_token_map) = token_tree_to_syntax_node(
match &tt {
CowArc::Arc(it) => it,
CowArc::Owned(it) => it,
},
expand_to,
edition,
);
rev_token_map.matched_arm = matched_arm;
ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
}
@ -462,7 +468,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
let (mut tt, undo_info) = {
let syntax = item_node.syntax();
let censor_cfg = cfg_process::process_cfg_attrs(syntax, &loc, db).unwrap_or_default();
let censor_cfg = cfg_process::process_cfg_attrs(db, syntax, &loc).unwrap_or_default();
let mut fixups = fixup::fixup_syntax(map.as_ref(), syntax, span);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
@ -540,11 +546,13 @@ fn macro_expand(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
loc: MacroCallLoc,
) -> ExpandResult<CowArc<tt::Subtree>> {
) -> ExpandResult<(CowArc<tt::Subtree>, MatchedArmIndex)> {
let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered();
let (ExpandResult { value: tt, err }, span) = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind {
MacroDefKind::ProcMacro(..) => {
return db.expand_proc_macro(macro_call_id).map(CowArc::Arc).zip_val(None)
}
_ => {
let (macro_arg, undo_info, span) =
db.macro_arg_considering_derives(macro_call_id, &loc.kind);
@ -556,10 +564,10 @@ fn macro_expand(
.decl_macro_expander(loc.def.krate, id)
.expand(db, arg.clone(), macro_call_id, span),
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into)
it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
}
MacroDefKind::BuiltInDerive(it, _) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into)
it.expand(db, macro_call_id, arg, span).map_err(Into::into).zip_val(None)
}
MacroDefKind::BuiltInEager(it, _) => {
// This might look a bit odd, but we do not expand the inputs to eager macros here.
@ -570,7 +578,8 @@ fn macro_expand(
// As such we just return the input subtree here.
let eager = match &loc.kind {
MacroCallKind::FnLike { eager: None, .. } => {
return ExpandResult::ok(CowArc::Arc(macro_arg.clone()));
return ExpandResult::ok(CowArc::Arc(macro_arg.clone()))
.zip_val(None);
}
MacroCallKind::FnLike { eager: Some(eager), .. } => Some(&**eager),
_ => None,
@ -582,12 +591,12 @@ fn macro_expand(
// FIXME: We should report both errors!
res.err = error.clone().or(res.err);
}
res
res.zip_val(None)
}
MacroDefKind::BuiltInAttr(it, _) => {
let mut res = it.expand(db, macro_call_id, arg, span);
fixup::reverse_fixups(&mut res.value, &undo_info);
res
res.zip_val(None)
}
_ => unreachable!(),
};
@ -599,16 +608,18 @@ fn macro_expand(
if !loc.def.is_include() {
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value.map(|()| {
CowArc::Owned(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(span),
token_trees: Box::new([]),
return value
.map(|()| {
CowArc::Owned(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(span),
token_trees: Box::new([]),
})
})
});
.zip_val(matched_arm);
}
}
ExpandResult { value: CowArc::Owned(tt), err }
ExpandResult { value: (CowArc::Owned(tt), matched_arm), err }
}
fn proc_macro_span(db: &dyn ExpandDatabase, ast: AstId<ast::Fn>) -> Span {
@ -668,6 +679,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
edition: parser::Edition,
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
@ -676,7 +688,7 @@ fn token_tree_to_syntax_node(
ExpandTo::Type => mbe::TopEntryPoint::Type,
ExpandTo::Expr => mbe::TopEntryPoint::Expr,
};
mbe::token_tree_to_syntax_node(tt, entry_point)
mbe::token_tree_to_syntax_node(tt, entry_point, edition)
}
fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {

View file

@ -2,7 +2,8 @@
use std::sync::OnceLock;
use base_db::{CrateId, VersionReq};
use span::{MacroCallId, Span, SyntaxContextId};
use span::{Edition, MacroCallId, Span, SyntaxContextId};
use stdx::TupleExt;
use syntax::{ast, AstNode};
use triomphe::Arc;
@ -30,7 +31,7 @@ impl DeclarativeMacroExpander {
tt: tt::Subtree,
call_id: MacroCallId,
span: Span,
) -> ExpandResult<tt::Subtree> {
) -> ExpandResult<(tt::Subtree, Option<u32>)> {
let loc = db.lookup_intern_macro_call(call_id);
let toolchain = db.toolchain(loc.def.krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
@ -46,7 +47,7 @@ impl DeclarativeMacroExpander {
});
match self.mac.err() {
Some(_) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
(tt::Subtree::empty(tt::DelimSpan { open: span, close: span }), None),
ExpandError::MacroDefinition,
),
None => self
@ -56,6 +57,7 @@ impl DeclarativeMacroExpander {
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
new_meta_vars,
span,
loc.def.edition,
)
.map_err(Into::into),
}
@ -67,6 +69,7 @@ impl DeclarativeMacroExpander {
tt: tt::Subtree,
krate: CrateId,
call_site: Span,
def_site_edition: Edition,
) -> ExpandResult<tt::Subtree> {
let toolchain = db.toolchain(krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
@ -85,7 +88,11 @@ impl DeclarativeMacroExpander {
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::MacroDefinition,
),
None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
None => self
.mac
.expand(&tt, |_| (), new_meta_vars, call_site, def_site_edition)
.map(TupleExt::head)
.map_err(Into::into),
}
}

View file

@ -396,7 +396,7 @@ mod tests {
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture);
let parsed = syntax::SourceFile::parse(ra_fixture, span::Edition::CURRENT);
let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let fixups = super::fixup_syntax(
span_map.as_ref(),
@ -417,7 +417,11 @@ mod tests {
expect.assert_eq(&actual);
// the fixed-up tree should be syntactically valid
let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
let (parse, _) = mbe::token_tree_to_syntax_node(
&tt,
::mbe::TopEntryPoint::MacroItems,
parser::Edition::CURRENT,
);
assert!(
parse.errors().is_empty(),
"parse has syntax errors. parse tree:\n{:#?}",

View file

@ -3,7 +3,7 @@
//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax
//! tree originates not from the text of some `FileId`, but from some macro
//! expansion.
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod attrs;

View file

@ -303,6 +303,7 @@ pub mod known {
rust_2015,
rust_2018,
rust_2021,
rust_2024,
v1,
new_display,
new_debug,

View file

@ -61,7 +61,7 @@ test-utils.workspace = true
test-fixture.workspace = true
[features]
in-rust-tree = []
in-rust-tree = ["hir-expand/in-rust-tree"]
[lints]
workspace = true

View file

@ -74,6 +74,10 @@ impl<D> TyBuilder<D> {
(self.data, subst)
}
pub fn build_into_subst(self) -> Substitution {
self.build_internal().1
}
pub fn push(mut self, arg: impl CastTo<GenericArg>) -> Self {
assert!(self.remaining() > 0);
let arg = arg.cast(Interner);
@ -291,7 +295,6 @@ impl TyBuilder<hir_def::AdtId> {
) -> Self {
// Note that we're building ADT, so we never have parent generic parameters.
let defaults = db.generic_defaults(self.data.into());
let dummy_ty = TyKind::Error.intern(Interner).cast(Interner);
for default_ty in defaults.iter().skip(self.vec.len()) {
// NOTE(skip_binders): we only check if the arg type is error type.
if let Some(x) = default_ty.skip_binders().ty(Interner) {
@ -301,13 +304,16 @@ impl TyBuilder<hir_def::AdtId> {
}
}
// Each default can only depend on the previous parameters.
// FIXME: we don't handle const generics here.
let subst_so_far = Substitution::from_iter(
Interner,
self.vec
.iter()
.cloned()
.chain(iter::repeat(dummy_ty.clone()))
.chain(self.param_kinds[self.vec.len()..].iter().map(|it| match it {
ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner),
ParamKind::Lifetime => error_lifetime().cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
}))
.take(self.param_kinds.len()),
);
self.vec.push(default_ty.clone().substitute(Interner, &subst_so_far).cast(Interner));

View file

@ -1,6 +1,8 @@
//! Various extensions traits for Chalk types.
use chalk_ir::{cast::Cast, FloatTy, IntTy, Mutability, Scalar, TyVariableKind, UintTy};
use chalk_ir::{
cast::Cast, FloatTy, IntTy, Mutability, Scalar, TyVariableKind, TypeOutlives, UintTy,
};
use hir_def::{
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
generics::TypeOrConstParamData,
@ -312,7 +314,7 @@ impl TyExt for Ty {
.generic_predicates(id.parent)
.iter()
.map(|pred| pred.clone().substitute(Interner, &substs))
.filter(|wc| match &wc.skip_binders() {
.filter(|wc| match wc.skip_binders() {
WhereClause::Implemented(tr) => {
&tr.self_type_parameter(Interner) == self
}
@ -320,6 +322,9 @@ impl TyExt for Ty {
alias: AliasTy::Projection(proj),
ty: _,
}) => &proj.self_type_parameter(db) == self,
WhereClause::TypeOutlives(TypeOutlives { ty, lifetime: _ }) => {
ty == self
}
_ => false,
})
.collect::<Vec<_>>();

View file

@ -77,30 +77,32 @@ pub(crate) fn path_to_const(
resolver: &Resolver,
path: &Path,
mode: ParamLoweringMode,
args_lazy: impl FnOnce() -> Generics,
args: impl FnOnce() -> Option<Generics>,
debruijn: DebruijnIndex,
expected_ty: Ty,
) -> Option<Const> {
match resolver.resolve_path_in_value_ns_fully(db.upcast(), path) {
Some(ValueNs::GenericParam(p)) => {
let ty = db.const_param_ty(p);
let args = args_lazy();
let value = match mode {
ParamLoweringMode::Placeholder => {
ConstValue::Placeholder(to_placeholder_idx(db, p.into()))
}
ParamLoweringMode::Variable => match args.param_idx(p.into()) {
Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
None => {
never!(
"Generic list doesn't contain this param: {:?}, {:?}, {:?}",
args,
path,
p
);
return None;
ParamLoweringMode::Variable => {
let args = args();
match args.as_ref().and_then(|args| args.type_or_const_param_idx(p.into())) {
Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
None => {
never!(
"Generic list doesn't contain this param: {:?}, {:?}, {:?}",
args,
path,
p
);
return None;
}
}
},
}
};
Some(ConstData { ty, value }.intern(Interner))
}
@ -285,7 +287,6 @@ pub(crate) fn eval_to_const(
expr: ExprId,
mode: ParamLoweringMode,
ctx: &mut InferenceContext<'_>,
args: impl FnOnce() -> Generics,
debruijn: DebruijnIndex,
) -> Const {
let db = ctx.db;
@ -304,7 +305,9 @@ pub(crate) fn eval_to_const(
}
if let Expr::Path(p) = &ctx.body.exprs[expr] {
let resolver = &ctx.resolver;
if let Some(c) = path_to_const(db, resolver, p, mode, args, debruijn, infer[expr].clone()) {
if let Some(c) =
path_to_const(db, resolver, p, mode, || ctx.generics(), debruijn, infer[expr].clone())
{
return c;
}
}

View file

@ -43,7 +43,7 @@ mod allow {
}
pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
let _p = tracing::span!(tracing::Level::INFO, "validate_module_item").entered();
let _p = tracing::span!(tracing::Level::INFO, "incorrect_case").entered();
let mut validator = DeclValidator::new(db);
validator.validate_item(owner);
validator.sink

View file

@ -11,6 +11,7 @@ use hir_def::{ItemContainerId, Lookup};
use hir_expand::name;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use rustc_pattern_analysis::constructor::Constructor;
use syntax::{ast, AstNode};
use tracing::debug;
use triomphe::Arc;
@ -190,45 +191,45 @@ impl ExprValidator {
let pattern_arena = Arena::new();
let mut m_arms = Vec::with_capacity(arms.len());
let mut has_lowering_errors = false;
// Note: Skipping the entire diagnostic rather than just not including a faulty match arm is
// preferred to avoid the chance of false positives.
for arm in arms {
if let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) {
// We only include patterns whose type matches the type
// of the scrutinee expression. If we had an InvalidMatchArmPattern
// diagnostic or similar we could raise that in an else
// block here.
//
// When comparing the types, we also have to consider that rustc
// will automatically de-reference the scrutinee expression type if
// necessary.
//
// FIXME we should use the type checker for this.
if (pat_ty == scrut_ty
|| scrut_ty
.as_reference()
.map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
.unwrap_or(false))
&& types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer)
{
// If we had a NotUsefulMatchArm diagnostic, we could
// check the usefulness of each pattern as we added it
// to the matrix here.
let pat = self.lower_pattern(&cx, arm.pat, db, &mut has_lowering_errors);
let m_arm = pat_analysis::MatchArm {
pat: pattern_arena.alloc(pat),
has_guard: arm.guard.is_some(),
arm_data: (),
};
m_arms.push(m_arm);
if !has_lowering_errors {
continue;
}
let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) else {
return;
};
// We only include patterns whose type matches the type
// of the scrutinee expression. If we had an InvalidMatchArmPattern
// diagnostic or similar we could raise that in an else
// block here.
//
// When comparing the types, we also have to consider that rustc
// will automatically de-reference the scrutinee expression type if
// necessary.
//
// FIXME we should use the type checker for this.
if (pat_ty == scrut_ty
|| scrut_ty
.as_reference()
.map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
.unwrap_or(false))
&& types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer)
{
// If we had a NotUsefulMatchArm diagnostic, we could
// check the usefulness of each pattern as we added it
// to the matrix here.
let pat = self.lower_pattern(&cx, arm.pat, db, &mut has_lowering_errors);
let m_arm = pat_analysis::MatchArm {
pat: pattern_arena.alloc(pat),
has_guard: arm.guard.is_some(),
arm_data: (),
};
m_arms.push(m_arm);
if !has_lowering_errors {
continue;
}
}
// If we can't resolve the type of a pattern, or the pattern type doesn't
// fit the match expression, we skip this diagnostic. Skipping the entire
// diagnostic rather than just not including this match arm is preferred
// to avoid the chance of false positives.
// If the pattern type doesn't fit the match expression, we skip this diagnostic.
cov_mark::hit!(validate_match_bailed_out);
return;
}
@ -266,15 +267,17 @@ impl ExprValidator {
let mut have_errors = false;
let deconstructed_pat = self.lower_pattern(&cx, pat, db, &mut have_errors);
// optimization, wildcard trivially hold
if have_errors || matches!(deconstructed_pat.ctor(), Constructor::Wildcard) {
continue;
}
let match_arm = rustc_pattern_analysis::MatchArm {
pat: pattern_arena.alloc(deconstructed_pat),
has_guard: false,
arm_data: (),
};
if have_errors {
continue;
}
let report = match cx.compute_match_usefulness(&[match_arm], ty.clone()) {
Ok(v) => v,
Err(e) => {
@ -531,8 +534,16 @@ fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResul
fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) {
match infer.type_mismatch_for_pat(pat) {
Some(_) => *has_type_mismatches = true,
None if *has_type_mismatches => (),
None => {
body[pat].walk_child_pats(|subpat| walk(subpat, body, infer, has_type_mismatches))
let pat = &body[pat];
if let Pat::ConstBlock(expr) | Pat::Lit(expr) = *pat {
*has_type_mismatches |= infer.type_mismatch_for_expr(expr).is_some();
if *has_type_mismatches {
return;
}
}
pat.walk_child_pats(|subpat| walk(subpat, body, infer, has_type_mismatches))
}
}
}

View file

@ -1,9 +1,9 @@
//! Interface with `rustc_pattern_analysis`.
use std::fmt;
use tracing::debug;
use hir_def::{DefWithBodyId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use once_cell::unsync::Lazy;
use rustc_hash::FxHashMap;
use rustc_pattern_analysis::{
constructor::{Constructor, ConstructorSet, VariantVisibility},
@ -36,6 +36,24 @@ pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat<MatchCh
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum Void {}
/// An index type for enum variants. This ranges from 0 to `variants.len()`, whereas `EnumVariantId`
/// can take arbitrary large values (and hence mustn't be used with `IndexVec`/`BitSet`).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct EnumVariantContiguousIndex(usize);
impl EnumVariantContiguousIndex {
fn from_enum_variant_id(db: &dyn HirDatabase, target_evid: EnumVariantId) -> Self {
// Find the index of this variant in the list of variants.
use hir_def::Lookup;
let i = target_evid.lookup(db.upcast()).index as usize;
EnumVariantContiguousIndex(i)
}
fn to_enum_variant_id(self, db: &dyn HirDatabase, eid: EnumId) -> EnumVariantId {
db.enum_data(eid).variants[self.0].0
}
}
#[derive(Clone)]
pub(crate) struct MatchCheckCtx<'p> {
module: ModuleId,
@ -73,25 +91,27 @@ impl<'p> MatchCheckCtx<'p> {
}
fn is_uninhabited(&self, ty: &Ty) -> bool {
is_ty_uninhabited_from(ty, self.module, self.db)
is_ty_uninhabited_from(self.db, ty, self.module)
}
/// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
fn is_foreign_non_exhaustive_enum(&self, ty: &Ty) -> bool {
match ty.as_adt() {
Some((adt @ hir_def::AdtId::EnumId(_), _)) => {
let has_non_exhaustive_attr =
self.db.attrs(adt.into()).by_key("non_exhaustive").exists();
let is_local = adt.module(self.db.upcast()).krate() == self.module.krate();
has_non_exhaustive_attr && !is_local
}
_ => false,
}
/// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool {
let is_local = adt.krate(self.db.upcast()) == self.module.krate();
!is_local && self.db.attrs(adt.into()).by_key("non_exhaustive").exists()
}
fn variant_id_for_adt(ctor: &Constructor<Self>, adt: hir_def::AdtId) -> Option<VariantId> {
fn variant_id_for_adt(
db: &'p dyn HirDatabase,
ctor: &Constructor<Self>,
adt: hir_def::AdtId,
) -> Option<VariantId> {
match ctor {
&Variant(id) => Some(id.into()),
Variant(id) => {
let hir_def::AdtId::EnumId(eid) = adt else {
panic!("bad constructor {ctor:?} for adt {adt:?}")
};
Some(id.to_enum_variant_id(db, eid).into())
}
Struct | UnionField => match adt {
hir_def::AdtId::EnumId(_) => None,
hir_def::AdtId::StructId(id) => Some(id.into()),
@ -175,19 +195,24 @@ impl<'p> MatchCheckCtx<'p> {
ctor = Struct;
arity = 1;
}
&TyKind::Adt(adt, _) => {
&TyKind::Adt(AdtId(adt), _) => {
ctor = match pat.kind.as_ref() {
PatKind::Leaf { .. } if matches!(adt.0, hir_def::AdtId::UnionId(_)) => {
PatKind::Leaf { .. } if matches!(adt, hir_def::AdtId::UnionId(_)) => {
UnionField
}
PatKind::Leaf { .. } => Struct,
PatKind::Variant { enum_variant, .. } => Variant(*enum_variant),
PatKind::Variant { enum_variant, .. } => {
Variant(EnumVariantContiguousIndex::from_enum_variant_id(
self.db,
*enum_variant,
))
}
_ => {
never!();
Wildcard
}
};
let variant = Self::variant_id_for_adt(&ctor, adt.0).unwrap();
let variant = Self::variant_id_for_adt(self.db, &ctor, adt).unwrap();
arity = variant.variant_data(self.db.upcast()).fields().len();
}
_ => {
@ -239,7 +264,7 @@ impl<'p> MatchCheckCtx<'p> {
PatKind::Deref { subpattern: subpatterns.next().unwrap() }
}
TyKind::Adt(adt, substs) => {
let variant = Self::variant_id_for_adt(pat.ctor(), adt.0).unwrap();
let variant = Self::variant_id_for_adt(self.db, pat.ctor(), adt.0).unwrap();
let subpatterns = self
.list_variant_fields(pat.ty(), variant)
.zip(subpatterns)
@ -277,7 +302,7 @@ impl<'p> MatchCheckCtx<'p> {
impl<'p> PatCx for MatchCheckCtx<'p> {
type Error = ();
type Ty = Ty;
type VariantIdx = EnumVariantId;
type VariantIdx = EnumVariantContiguousIndex;
type StrLit = Void;
type ArmData = ();
type PatData = PatData<'p>;
@ -303,7 +328,7 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
// patterns. If we're here we can assume this is a box pattern.
1
} else {
let variant = Self::variant_id_for_adt(ctor, adt).unwrap();
let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
variant.variant_data(self.db.upcast()).fields().len()
}
}
@ -343,25 +368,22 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
single(subst_ty)
} else {
let variant = Self::variant_id_for_adt(ctor, adt).unwrap();
let (adt, _) = ty.as_adt().unwrap();
let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
let adt_is_local =
variant.module(self.db.upcast()).krate() == self.module.krate();
// Whether we must not match the fields of this variant exhaustively.
let is_non_exhaustive =
self.db.attrs(variant.into()).by_key("non_exhaustive").exists()
&& !adt_is_local;
let visibilities = self.db.field_visibilities(variant);
let is_non_exhaustive = Lazy::new(|| self.is_foreign_non_exhaustive(adt));
let visibilities = Lazy::new(|| self.db.field_visibilities(variant));
self.list_variant_fields(ty, variant)
.map(move |(fid, ty)| {
let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
|| visibilities[fid]
.is_visible_from(self.db.upcast(), self.module);
let is_visible = || {
matches!(adt, hir_def::AdtId::EnumId(..))
|| visibilities[fid]
.is_visible_from(self.db.upcast(), self.module)
};
let is_uninhabited = self.is_uninhabited(&ty);
let private_uninhabited =
is_uninhabited && (!is_visible || is_non_exhaustive);
is_uninhabited && (!is_visible() || *is_non_exhaustive);
(ty, PrivateUninhabitedField(private_uninhabited))
})
.collect()
@ -413,23 +435,26 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
TyKind::Scalar(Scalar::Char) => unhandled(),
TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(),
TyKind::Array(..) | TyKind::Slice(..) => unhandled(),
TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), subst) => {
let enum_data = cx.db.enum_data(*enum_id);
let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty);
&TyKind::Adt(AdtId(adt @ hir_def::AdtId::EnumId(enum_id)), ref subst) => {
let enum_data = cx.db.enum_data(enum_id);
let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive(adt);
if enum_data.variants.is_empty() && !is_declared_nonexhaustive {
ConstructorSet::NoConstructors
} else {
let mut variants = FxHashMap::default();
for &(variant, _) in enum_data.variants.iter() {
let mut variants = FxHashMap::with_capacity_and_hasher(
enum_data.variants.len(),
Default::default(),
);
for (i, &(variant, _)) in enum_data.variants.iter().enumerate() {
let is_uninhabited =
is_enum_variant_uninhabited_from(variant, subst, cx.module, cx.db);
is_enum_variant_uninhabited_from(cx.db, variant, subst, cx.module);
let visibility = if is_uninhabited {
VariantVisibility::Empty
} else {
VariantVisibility::Visible
};
variants.insert(variant, visibility);
variants.insert(EnumVariantContiguousIndex(i), visibility);
}
ConstructorSet::Variants {
@ -453,10 +478,10 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
f: &mut fmt::Formatter<'_>,
pat: &rustc_pattern_analysis::pat::DeconstructedPat<Self>,
) -> fmt::Result {
let variant =
pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt));
let db = pat.data().db;
let variant =
pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(db, pat.ctor(), adt));
if let Some(variant) = variant {
match variant {
VariantId::EnumVariantId(v) => {
@ -474,7 +499,7 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
}
fn bug(&self, fmt: fmt::Arguments<'_>) {
debug!("{}", fmt)
never!("{}", fmt)
}
fn complexity_exceeded(&self) -> Result<(), Self::Error> {

View file

@ -4,7 +4,7 @@
use hir_def::{
body::Body,
hir::{Expr, ExprId, UnaryOp},
resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs},
DefWithBodyId,
};
@ -13,9 +13,9 @@ use crate::{
};
pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
let infer = db.infer(def);
let mut res = Vec::new();
let _p = tracing::span!(tracing::Level::INFO, "missing_unsafe").entered();
let mut res = Vec::new();
let is_unsafe = match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(),
DefWithBodyId::StaticId(_)
@ -28,6 +28,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
}
let body = db.body(def);
let infer = db.infer(def);
unsafe_expressions(db, &infer, def, &body, body.body_expr, &mut |expr| {
if !expr.inside_unsafe_block {
res.push(expr.expr);
@ -51,14 +52,24 @@ pub fn unsafe_expressions(
current: ExprId,
unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr),
) {
walk_unsafe(db, infer, def, body, current, false, unsafe_expr_cb)
walk_unsafe(
db,
infer,
body,
&mut resolver_for_expr(db.upcast(), def, current),
def,
current,
false,
unsafe_expr_cb,
)
}
fn walk_unsafe(
db: &dyn HirDatabase,
infer: &InferenceResult,
def: DefWithBodyId,
body: &Body,
resolver: &mut Resolver,
def: DefWithBodyId,
current: ExprId,
inside_unsafe_block: bool,
unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr),
@ -73,13 +84,14 @@ fn walk_unsafe(
}
}
Expr::Path(path) => {
let resolver = resolver_for_expr(db.upcast(), def, current);
let g = resolver.update_to_inner_scope(db.upcast(), def, current);
let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path);
if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial {
if db.static_data(id).mutable {
unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
}
}
resolver.reset_to_guard(g);
}
Expr::MethodCall { .. } => {
if infer
@ -97,13 +109,13 @@ fn walk_unsafe(
}
Expr::Unsafe { .. } => {
return expr.walk_child_exprs(|child| {
walk_unsafe(db, infer, def, body, child, true, unsafe_expr_cb);
walk_unsafe(db, infer, body, resolver, def, child, true, unsafe_expr_cb);
});
}
_ => {}
}
expr.walk_child_exprs(|child| {
walk_unsafe(db, infer, def, body, child, inside_unsafe_block, unsafe_expr_cb);
walk_unsafe(db, infer, body, resolver, def, child, inside_unsafe_block, unsafe_expr_cb);
});
}

View file

@ -9,6 +9,7 @@ use std::{
use base_db::CrateId;
use chalk_ir::{BoundVar, Safety, TyKind};
use either::Either;
use hir_def::{
data::adt::VariantData,
db::DefDatabase,
@ -27,7 +28,7 @@ use intern::{Internable, Interned};
use itertools::Itertools;
use la_arena::ArenaMap;
use smallvec::SmallVec;
use stdx::never;
use stdx::{never, IsNoneOr};
use triomphe::Arc;
use crate::{
@ -40,10 +41,11 @@ use crate::{
mir::pad16,
primitive, to_assoc_type_id,
utils::{self, detect_variant_from_bytes, generics, ClosureSubst},
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstScalar, ConstValue,
DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives,
MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar,
Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyExt, WhereClause,
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const,
ConstScalar, ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime,
LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt,
QuantifiedWhereClause, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty,
TyExt, WhereClause,
};
pub trait HirWrite: fmt::Write {
@ -58,11 +60,18 @@ impl HirWrite for String {}
impl HirWrite for fmt::Formatter<'_> {}
pub struct HirFormatter<'a> {
/// The database handle
pub db: &'a dyn HirDatabase,
/// The sink to write into
fmt: &'a mut dyn HirWrite,
/// A buffer to intercept writes with, this allows us to track the overall size of the formatted output.
buf: String,
/// The current size of the formatted output.
curr_size: usize,
pub(crate) max_size: Option<usize>,
/// Size from which we should truncate the output.
max_size: Option<usize>,
/// When rendering something that has a concept of "children" (like fields in a struct), this limits
/// how many should be rendered.
pub entity_limit: Option<usize>,
omit_verbose_types: bool,
closure_style: ClosureStyle,
@ -302,7 +311,6 @@ impl DisplayTarget {
#[derive(Debug)]
pub enum DisplaySourceCodeError {
PathNotFound,
UnknownType,
Coroutine,
OpaqueType,
}
@ -414,12 +422,7 @@ impl HirDisplay for ProjectionTy {
let proj_params_count =
self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
let proj_params = &self.substitution.as_slice(Interner)[..proj_params_count];
if !proj_params.is_empty() {
write!(f, "<")?;
f.write_joined(proj_params, ", ")?;
write!(f, ">")?;
}
Ok(())
hir_fmt_generics(f, proj_params, None)
}
}
@ -452,7 +455,7 @@ impl HirDisplay for Const {
ConstValue::Placeholder(idx) => {
let id = from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics.params.type_or_consts[id.local_id];
let param_data = &generics.params[id.local_id];
write!(f, "{}", param_data.name().unwrap().display(f.db.upcast()))?;
Ok(())
}
@ -460,7 +463,11 @@ impl HirDisplay for Const {
ConstScalar::Bytes(b, m) => render_const_scalar(f, b, m, &data.ty),
ConstScalar::UnevaluatedConst(c, parameters) => {
write!(f, "{}", c.name(f.db.upcast()))?;
hir_fmt_generics(f, parameters, c.generic_def(f.db.upcast()))?;
hir_fmt_generics(
f,
parameters.as_slice(Interner),
c.generic_def(f.db.upcast()),
)?;
Ok(())
}
ConstScalar::Unknown => f.write_char('_'),
@ -936,36 +943,31 @@ impl HirDisplay for Ty {
}
};
f.end_location_link();
if parameters.len(Interner) > 0 {
let generics = generics(db.upcast(), def.into());
let (
parent_params,
self_param,
type_params,
const_params,
_impl_trait_params,
lifetime_params,
) = generics.provenance_split();
let total_len =
parent_params + self_param + type_params + const_params + lifetime_params;
let (parent_len, self_, type_, const_, impl_, lifetime) =
generics.provenance_split();
let parameters = parameters.as_slice(Interner);
// We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
if total_len > 0 {
if parameters.len() - impl_ > 0 {
// `parameters` are in the order of fn's params (including impl traits), fn's lifetimes
// parent's params (those from enclosing impl or trait, if any).
let parameters = parameters.as_slice(Interner);
let fn_params_len = self_param + type_params + const_params;
// This will give slice till last type or const
let fn_params = parameters.get(..fn_params_len);
let fn_lt_params =
parameters.get(fn_params_len..(fn_params_len + lifetime_params));
let parent_params = parameters.get(parameters.len() - parent_params..);
let params = parent_params
.into_iter()
.chain(fn_lt_params)
.chain(fn_params)
.flatten();
let (fn_params, other) =
parameters.split_at(self_ + type_ + const_ + lifetime);
let (_impl, parent_params) = other.split_at(impl_);
debug_assert_eq!(parent_params.len(), parent_len);
let parent_params =
generic_args_sans_defaults(f, Some(def.into()), parent_params);
let fn_params = generic_args_sans_defaults(f, Some(def.into()), fn_params);
write!(f, "<")?;
f.write_joined(params, ", ")?;
hir_fmt_generic_arguments(f, parent_params)?;
if !parent_params.is_empty() && !fn_params.is_empty() {
write!(f, ", ")?;
}
hir_fmt_generic_arguments(f, fn_params)?;
write!(f, ">")?;
}
}
@ -1009,7 +1011,7 @@ impl HirDisplay for Ty {
let generic_def = self.as_generic_def(db);
hir_fmt_generics(f, parameters, generic_def)?;
hir_fmt_generics(f, parameters.as_slice(Interner), generic_def)?;
}
TyKind::AssociatedType(assoc_type_id, parameters) => {
let type_alias = from_assoc_type_id(*assoc_type_id);
@ -1032,20 +1034,15 @@ impl HirDisplay for Ty {
f.end_location_link();
// Note that the generic args for the associated type come before those for the
// trait (including the self type).
// FIXME: reconsider the generic args order upon formatting?
if parameters.len(Interner) > 0 {
write!(f, "<")?;
f.write_joined(parameters.as_slice(Interner), ", ")?;
write!(f, ">")?;
}
hir_fmt_generics(f, parameters.as_slice(Interner), None)
} else {
let projection_ty = ProjectionTy {
associated_ty_id: to_assoc_type_id(type_alias),
substitution: parameters.clone(),
};
projection_ty.hir_fmt(f)?;
}
projection_ty.hir_fmt(f)
}?;
}
TyKind::Foreign(type_alias) => {
let alias = from_foreign_def_id(*type_alias);
@ -1072,6 +1069,7 @@ impl HirDisplay for Ty {
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
Either::Left(self),
bounds.skip_binders(),
SizedByDefault::Sized { anchor: krate },
)?;
@ -1087,6 +1085,7 @@ impl HirDisplay for Ty {
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
Either::Left(self),
bounds.skip_binders(),
SizedByDefault::Sized { anchor: krate },
)?;
@ -1137,7 +1136,7 @@ impl HirDisplay for Ty {
}
ClosureStyle::ClosureWithSubst => {
write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
return hir_fmt_generics(f, substs, None);
return hir_fmt_generics(f, substs.as_slice(Interner), None);
}
_ => (),
}
@ -1173,7 +1172,7 @@ impl HirDisplay for Ty {
TyKind::Placeholder(idx) => {
let id = from_placeholder_idx(db, *idx);
let generics = generics(db.upcast(), id.parent);
let param_data = &generics.params.type_or_consts[id.local_id];
let param_data = &generics.params[id.local_id];
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
@ -1189,21 +1188,24 @@ impl HirDisplay for Ty {
.generic_predicates(id.parent)
.iter()
.map(|pred| pred.clone().substitute(Interner, &substs))
.filter(|wc| match &wc.skip_binders() {
.filter(|wc| match wc.skip_binders() {
WhereClause::Implemented(tr) => {
&tr.self_type_parameter(Interner) == self
tr.self_type_parameter(Interner) == *self
}
WhereClause::AliasEq(AliasEq {
alias: AliasTy::Projection(proj),
ty: _,
}) => &proj.self_type_parameter(db) == self,
_ => false,
}) => proj.self_type_parameter(db) == *self,
WhereClause::AliasEq(_) => false,
WhereClause::TypeOutlives(to) => to.ty == *self,
WhereClause::LifetimeOutlives(_) => false,
})
.collect::<Vec<_>>();
let krate = id.parent.module(db.upcast()).krate();
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
Either::Left(self),
&bounds,
SizedByDefault::Sized { anchor: krate },
)?;
@ -1229,6 +1231,7 @@ impl HirDisplay for Ty {
write_bounds_like_dyn_trait_with_prefix(
f,
"dyn",
Either::Left(self),
&bounds,
SizedByDefault::NotSized,
)?;
@ -1252,6 +1255,7 @@ impl HirDisplay for Ty {
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
Either::Left(self),
bounds.skip_binders(),
SizedByDefault::Sized { anchor: krate },
)?;
@ -1266,6 +1270,7 @@ impl HirDisplay for Ty {
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
Either::Left(self),
bounds.skip_binders(),
SizedByDefault::Sized { anchor: krate },
)?;
@ -1277,11 +1282,10 @@ impl HirDisplay for Ty {
}
TyKind::Error => {
if f.display_target.is_source_code() {
return Err(HirDisplayError::DisplaySourceCodeError(
DisplaySourceCodeError::UnknownType,
));
f.write_char('_')?;
} else {
write!(f, "{{unknown}}")?;
}
write!(f, "{{unknown}}")?;
}
TyKind::InferenceVar(..) => write!(f, "_")?,
TyKind::Coroutine(_, subst) => {
@ -1318,93 +1322,92 @@ impl HirDisplay for Ty {
fn hir_fmt_generics(
f: &mut HirFormatter<'_>,
parameters: &Substitution,
parameters: &[GenericArg],
generic_def: Option<hir_def::GenericDefId>,
) -> Result<(), HirDisplayError> {
let db = f.db;
if parameters.len(Interner) > 0 {
use std::cmp::Ordering;
let param_compare =
|a: &GenericArg, b: &GenericArg| match (a.data(Interner), b.data(Interner)) {
(crate::GenericArgData::Lifetime(_), crate::GenericArgData::Lifetime(_)) => {
Ordering::Equal
}
(crate::GenericArgData::Lifetime(_), _) => Ordering::Less,
(_, crate::GenericArgData::Lifetime(_)) => Ordering::Less,
(_, _) => Ordering::Equal,
};
let parameters_to_write = if f.display_target.is_source_code() || f.omit_verbose_types() {
match generic_def
.map(|generic_def_id| db.generic_defaults(generic_def_id))
.filter(|defaults| !defaults.is_empty())
{
None => parameters.as_slice(Interner),
Some(default_parameters) => {
fn should_show(
parameter: &GenericArg,
default_parameters: &[Binders<GenericArg>],
i: usize,
parameters: &Substitution,
) -> bool {
if parameter.ty(Interner).map(|it| it.kind(Interner))
== Some(&TyKind::Error)
{
return true;
}
if let Some(ConstValue::Concrete(c)) =
parameter.constant(Interner).map(|it| &it.data(Interner).value)
{
if c.interned == ConstScalar::Unknown {
return true;
}
}
if parameter.lifetime(Interner).map(|it| it.data(Interner))
== Some(&crate::LifetimeData::Static)
{
return true;
}
let default_parameter = match default_parameters.get(i) {
Some(it) => it,
None => return true,
};
let actual_default =
default_parameter.clone().substitute(Interner, &parameters);
parameter != &actual_default
}
let mut default_from = 0;
for (i, parameter) in parameters.iter(Interner).enumerate() {
if should_show(parameter, &default_parameters, i, parameters) {
default_from = i + 1;
}
}
&parameters.as_slice(Interner)[0..default_from]
}
}
} else {
parameters.as_slice(Interner)
};
//FIXME: Should handle the ordering of lifetimes when creating substitutions
let mut parameters_to_write = parameters_to_write.to_vec();
parameters_to_write.sort_by(param_compare);
if !parameters_to_write.is_empty() {
write!(f, "<")?;
let mut first = true;
for generic_arg in parameters_to_write {
if !first {
write!(f, ", ")?;
}
first = false;
if f.display_target.is_source_code()
&& generic_arg.ty(Interner).map(|ty| ty.kind(Interner)) == Some(&TyKind::Error)
{
write!(f, "_")?;
} else {
generic_arg.hir_fmt(f)?;
}
}
if parameters.is_empty() {
return Ok(());
}
write!(f, ">")?;
let parameters_to_write = generic_args_sans_defaults(f, generic_def, parameters);
if !parameters_to_write.is_empty() {
write!(f, "<")?;
hir_fmt_generic_arguments(f, parameters_to_write)?;
write!(f, ">")?;
}
Ok(())
}
fn generic_args_sans_defaults<'ga>(
f: &mut HirFormatter<'_>,
generic_def: Option<hir_def::GenericDefId>,
parameters: &'ga [GenericArg],
) -> &'ga [GenericArg] {
if f.display_target.is_source_code() || f.omit_verbose_types() {
match generic_def
.map(|generic_def_id| f.db.generic_defaults(generic_def_id))
.filter(|it| !it.is_empty())
{
None => parameters,
Some(default_parameters) => {
let should_show = |arg: &GenericArg, i: usize| {
let is_err = |arg: &GenericArg| match arg.data(Interner) {
chalk_ir::GenericArgData::Lifetime(it) => {
*it.data(Interner) == LifetimeData::Error
}
chalk_ir::GenericArgData::Ty(it) => *it.kind(Interner) == TyKind::Error,
chalk_ir::GenericArgData::Const(it) => matches!(
it.data(Interner).value,
ConstValue::Concrete(ConcreteConst {
interned: ConstScalar::Unknown,
..
})
),
};
// if the arg is error like, render it to inform the user
if is_err(arg) {
return true;
}
// otherwise, if the arg is equal to the param default, hide it (unless the
// default is an error which can happen for the trait Self type)
default_parameters.get(i).is_none_or(|default_parameter| {
// !is_err(default_parameter.skip_binders())
// &&
arg != &default_parameter.clone().substitute(Interner, &parameters)
})
};
let mut default_from = 0;
for (i, parameter) in parameters.iter().enumerate() {
if should_show(parameter, i) {
default_from = i + 1;
}
}
&parameters[0..default_from]
}
}
} else {
parameters
}
}
/// Writes `parameters` comma-separated, rendering any trailing run of
/// lifetime arguments first (lifetimes precede types/consts in Rust syntax,
/// but follow them in the substitution order).
fn hir_fmt_generic_arguments(
    f: &mut HirFormatter<'_>,
    parameters: &[GenericArg],
) -> Result<(), HirDisplayError> {
    // Split at the first lifetime argument; if there is none, the lifetime
    // half is empty.
    let (ty_or_const, lifetimes) = parameters
        .iter()
        .position(|arg| arg.lifetime(Interner).is_some())
        .map_or((parameters, &[][..]), |offset| parameters.split_at(offset));
    for (idx, generic_arg) in lifetimes.iter().chain(ty_or_const).enumerate() {
        if idx != 0 {
            write!(f, ", ")?;
        }
        generic_arg.hir_fmt(f)?;
    }
    Ok(())
}
@ -1468,6 +1471,7 @@ impl SizedByDefault {
pub fn write_bounds_like_dyn_trait_with_prefix(
f: &mut HirFormatter<'_>,
prefix: &str,
this: Either<&Ty, &Lifetime>,
predicates: &[QuantifiedWhereClause],
default_sized: SizedByDefault,
) -> Result<(), HirDisplayError> {
@ -1476,7 +1480,7 @@ pub fn write_bounds_like_dyn_trait_with_prefix(
|| predicates.is_empty() && matches!(default_sized, SizedByDefault::Sized { .. })
{
write!(f, " ")?;
write_bounds_like_dyn_trait(f, predicates, default_sized)
write_bounds_like_dyn_trait(f, this, predicates, default_sized)
} else {
Ok(())
}
@ -1484,6 +1488,7 @@ pub fn write_bounds_like_dyn_trait_with_prefix(
fn write_bounds_like_dyn_trait(
f: &mut HirFormatter<'_>,
this: Either<&Ty, &Lifetime>,
predicates: &[QuantifiedWhereClause],
default_sized: SizedByDefault,
) -> Result<(), HirDisplayError> {
@ -1524,23 +1529,54 @@ fn write_bounds_like_dyn_trait(
f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link();
if let [_, params @ ..] = trait_ref.substitution.as_slice(Interner) {
if is_fn_trait {
if is_fn_trait {
if let [_self, params @ ..] = trait_ref.substitution.as_slice(Interner) {
if let Some(args) =
params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple())
{
write!(f, "(")?;
f.write_joined(args.as_slice(Interner), ", ")?;
hir_fmt_generic_arguments(f, args.as_slice(Interner))?;
write!(f, ")")?;
}
} else if !params.is_empty() {
write!(f, "<")?;
f.write_joined(params, ", ")?;
// there might be assoc type bindings, so we leave the angle brackets open
angle_open = true;
}
} else {
let params = generic_args_sans_defaults(
f,
Some(trait_.into()),
trait_ref.substitution.as_slice(Interner),
);
if let [_self, params @ ..] = params {
if !params.is_empty() {
write!(f, "<")?;
hir_fmt_generic_arguments(f, params)?;
// there might be assoc type bindings, so we leave the angle brackets open
angle_open = true;
}
}
}
}
WhereClause::TypeOutlives(to) if Either::Left(&to.ty) == this => {
if !is_fn_trait && angle_open {
write!(f, ">")?;
angle_open = false;
}
if !first {
write!(f, " + ")?;
}
to.lifetime.hir_fmt(f)?;
}
WhereClause::TypeOutlives(_) => {}
WhereClause::LifetimeOutlives(lo) if Either::Right(&lo.a) == this => {
if !is_fn_trait && angle_open {
write!(f, ">")?;
angle_open = false;
}
if !first {
write!(f, " + ")?;
}
lo.b.hir_fmt(f)?;
}
WhereClause::LifetimeOutlives(_) => {}
WhereClause::AliasEq(alias_eq) if is_fn_trait => {
is_fn_trait = false;
if !alias_eq.ty.is_unit() {
@ -1567,9 +1603,9 @@ fn write_bounds_like_dyn_trait(
let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self();
if proj_arg_count > 0 {
write!(f, "<")?;
f.write_joined(
hir_fmt_generic_arguments(
f,
&proj.substitution.as_slice(Interner)[..proj_arg_count],
", ",
)?;
write!(f, ">")?;
}
@ -1577,10 +1613,6 @@ fn write_bounds_like_dyn_trait(
}
ty.hir_fmt(f)?;
}
// FIXME implement these
WhereClause::LifetimeOutlives(_) => {}
WhereClause::TypeOutlives(_) => {}
}
first = false;
}
@ -1630,12 +1662,7 @@ fn fmt_trait_ref(
f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link();
if tr.substitution.len(Interner) > 1 {
write!(f, "<")?;
f.write_joined(&tr.substitution.as_slice(Interner)[1..], ", ")?;
write!(f, ">")?;
}
Ok(())
hir_fmt_generics(f, &tr.substitution.as_slice(Interner)[1..], None)
}
impl HirDisplay for TraitRef {
@ -1690,16 +1717,18 @@ impl HirDisplay for Lifetime {
impl HirDisplay for LifetimeData {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self {
LifetimeData::BoundVar(idx) => idx.hir_fmt(f),
LifetimeData::InferenceVar(_) => write!(f, "_"),
LifetimeData::Placeholder(idx) => {
let id = lt_from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics.params.lifetimes[id.local_id];
let param_data = &generics.params[id.local_id];
write!(f, "{}", param_data.name.display(f.db.upcast()))?;
Ok(())
}
_ if f.display_target.is_source_code() => write!(f, "'_"),
LifetimeData::BoundVar(idx) => idx.hir_fmt(f),
LifetimeData::InferenceVar(_) => write!(f, "_"),
LifetimeData::Static => write!(f, "'static"),
LifetimeData::Error => write!(f, "'{{error}}"),
LifetimeData::Erased => Ok(()),
LifetimeData::Phantom(_, _) => Ok(()),
}

View file

@ -55,12 +55,12 @@ use triomphe::Arc;
use crate::{
db::HirDatabase,
fold_tys,
error_lifetime, fold_tys,
infer::{coerce::CoerceMany, unify::InferenceTable},
lower::ImplTraitLoweringMode,
static_lifetime, to_assoc_type_id,
to_assoc_type_id,
traits::FnTrait,
utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
utils::{Generics, InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId,
ImplTraitIdx, InEnvironment, Interner, Lifetime, OpaqueTyId, ProjectionTy, Substitution,
TraitEnvironment, Ty, TyBuilder, TyExt,
@ -326,7 +326,7 @@ pub struct Adjustment {
impl Adjustment {
pub fn borrow(m: Mutability, ty: Ty) -> Self {
let ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
let ty = TyKind::Ref(m, error_lifetime(), ty).intern(Interner);
Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty }
}
}
@ -429,7 +429,10 @@ pub struct InferenceResult {
/// Type of the result of `.into_iter()` on the for. `ExprId` is the one of the whole for loop.
pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
/// Whether there are any type-mismatching errors in the result.
pub(crate) has_errors: bool,
/// Interned common types to return references to.
// FIXME: Move this into `InferenceContext`
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
@ -628,6 +631,10 @@ impl<'a> InferenceContext<'a> {
}
}
pub(crate) fn generics(&self) -> Option<Generics> {
Some(crate::utils::generics(self.db.upcast(), self.resolver.generic_def()?))
}
// FIXME: This function should be private in module. It is currently only used in the consteval, since we need
// `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you
// used this function for another workaround, mention it here. If you really need this function and believe that
@ -654,6 +661,7 @@ impl<'a> InferenceContext<'a> {
type_of_rpit,
type_of_for_iterator,
type_mismatches,
has_errors,
standard_types: _,
pat_adjustments,
binding_modes: _,
@ -695,6 +703,9 @@ impl<'a> InferenceContext<'a> {
for ty in type_of_for_iterator.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
*has_errors = !type_mismatches.is_empty();
type_mismatches.retain(|_, mismatch| {
mismatch.expected = table.resolve_completely(mismatch.expected.clone());
mismatch.actual = table.resolve_completely(mismatch.actual.clone());
@ -1646,9 +1657,11 @@ impl std::ops::BitOrAssign for Diverges {
*self = *self | other;
}
}
/// A zipper that checks for unequal `{unknown}` occurrences in the two types. Used to filter out
/// mismatch diagnostics that only differ in `{unknown}`. These mismatches are usually not helpful.
/// As the cause is usually an underlying name resolution problem.
/// A zipper that checks for unequal occurrences of `{unknown}` and unresolved projections
/// in the two types. Used to filter out mismatch diagnostics that only differ in
/// `{unknown}` and unresolved projections. These mismatches are usually not helpful.
/// As the cause is usually an underlying name resolution problem
struct UnknownMismatch<'db>(&'db dyn HirDatabase);
impl chalk_ir::zip::Zipper<Interner> for UnknownMismatch<'_> {
fn zip_tys(&mut self, variance: Variance, a: &Ty, b: &Ty) -> chalk_ir::Fallible<()> {
@ -1721,7 +1734,12 @@ impl chalk_ir::zip::Zipper<Interner> for UnknownMismatch<'_> {
zip_substs(self, None, &fn_ptr_a.substitution.0, &fn_ptr_b.substitution.0)?
}
(TyKind::Error, TyKind::Error) => (),
(TyKind::Error, _) | (_, TyKind::Error) => return Err(chalk_ir::NoSolution),
(TyKind::Error, _)
| (_, TyKind::Error)
| (TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(_, _), _)
| (_, TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(_, _)) => {
return Err(chalk_ir::NoSolution)
}
_ => (),
}

View file

@ -19,10 +19,6 @@ impl CastCheck {
let expr_ty = table.resolve_ty_shallow(&self.expr_ty);
let cast_ty = table.resolve_ty_shallow(&self.cast_ty);
if expr_ty.contains_unknown() || cast_ty.contains_unknown() {
return;
}
if table.coerce(&expr_ty, &cast_ty).is_ok() {
return;
}

View file

@ -22,11 +22,11 @@ use stdx::never;
use crate::{
db::{HirDatabase, InternedClosure},
from_chalk_trait_id, from_placeholder_idx, make_binders,
error_lifetime, from_chalk_trait_id, from_placeholder_idx, make_binders,
mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
static_lifetime, to_chalk_trait_id,
to_chalk_trait_id,
traits::FnTrait,
utils::{self, elaborate_clause_supertraits, generics, Generics},
utils::{self, elaborate_clause_supertraits, Generics},
Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy,
DynTyExt, FnAbi, FnPointer, FnSig, Interner, OpaqueTy, ProjectionTyExt, Substitution, Ty,
TyExt, WhereClause,
@ -324,21 +324,17 @@ impl CapturedItemWithoutTy {
BorrowKind::Mut { .. } => Mutability::Mut,
_ => Mutability::Not,
};
TyKind::Ref(m, static_lifetime(), ty).intern(Interner)
TyKind::Ref(m, error_lifetime(), ty).intern(Interner)
}
};
return CapturedItem {
place: self.place,
kind: self.kind,
span: self.span,
ty: replace_placeholder_with_binder(ctx.db, ctx.owner, ty),
ty: replace_placeholder_with_binder(ctx, ty),
};
fn replace_placeholder_with_binder(
db: &dyn HirDatabase,
owner: DefWithBodyId,
ty: Ty,
) -> Binders<Ty> {
fn replace_placeholder_with_binder(ctx: &mut InferenceContext<'_>, ty: Ty) -> Binders<Ty> {
struct Filler<'a> {
db: &'a dyn HirDatabase,
generics: Generics,
@ -361,7 +357,7 @@ impl CapturedItemWithoutTy {
outer_binder: DebruijnIndex,
) -> Result<chalk_ir::Const<Interner>, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.param_idx(x) else {
let Some(idx) = self.generics.type_or_const_param_idx(x) else {
return Err(());
};
Ok(BoundVar::new(outer_binder, idx).to_const(Interner, ty))
@ -373,18 +369,18 @@ impl CapturedItemWithoutTy {
outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.param_idx(x) else {
let Some(idx) = self.generics.type_or_const_param_idx(x) else {
return Err(());
};
Ok(BoundVar::new(outer_binder, idx).to_ty(Interner))
}
}
let Some(generic_def) = owner.as_generic_def_id() else {
let Some(generics) = ctx.generics() else {
return Binders::empty(Interner, ty);
};
let filler = &mut Filler { db, generics: generics(db.upcast(), generic_def) };
let filler = &mut Filler { db: ctx.db, generics };
let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty);
make_binders(db, &filler.generics, result)
make_binders(ctx.db, &filler.generics, result)
}
}
}

View file

@ -18,11 +18,11 @@ use triomphe::Arc;
use crate::{
autoderef::{Autoderef, AutoderefKind},
db::HirDatabase,
error_lifetime,
infer::{
Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
TypeError, TypeMismatch,
},
static_lifetime,
utils::ClosureSubst,
Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Solution,
Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
@ -427,7 +427,7 @@ impl InferenceTable<'_> {
// compare those. Note that this means we use the target
// mutability [1], since it may be that we are coercing
// from `&mut T` to `&U`.
let lt = static_lifetime(); // FIXME: handle lifetimes correctly, see rustc
let lt = error_lifetime(); // FIXME: handle lifetimes correctly, see rustc
let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner);
match autoderef.table.try_unify(&derefd_from_ty, to_ty) {
Ok(result) => {
@ -621,7 +621,7 @@ impl InferenceTable<'_> {
(TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => {
coerce_mutabilities(*from_mt, to_mt)?;
let lt = static_lifetime();
let lt = error_lifetime();
Some((
Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
Adjustment {

View file

@ -23,6 +23,7 @@ use crate::{
autoderef::{builtin_deref, deref_by_trait, Autoderef},
consteval,
db::{InternedClosure, InternedCoroutine},
error_lifetime,
infer::{
coerce::{CoerceMany, CoercionCause},
find_continuable,
@ -630,7 +631,7 @@ impl InferenceContext<'_> {
let inner_ty = self.infer_expr_inner(*expr, &expectation);
match rawness {
Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
Rawness::Ref => TyKind::Ref(mutability, static_lifetime(), inner_ty),
Rawness::Ref => TyKind::Ref(mutability, error_lifetime(), inner_ty),
}
.intern(Interner)
}
@ -1039,18 +1040,12 @@ impl InferenceContext<'_> {
(
elem_ty,
if let Some(g_def) = self.owner.as_generic_def_id() {
let generics = generics(self.db.upcast(), g_def);
consteval::eval_to_const(
repeat,
ParamLoweringMode::Placeholder,
self,
|| generics,
DebruijnIndex::INNERMOST,
)
} else {
consteval::usize_const(self.db, None, krate)
},
consteval::eval_to_const(
repeat,
ParamLoweringMode::Placeholder,
self,
DebruijnIndex::INNERMOST,
),
)
}
};
@ -1851,7 +1846,7 @@ impl InferenceContext<'_> {
ty,
c,
ParamLoweringMode::Placeholder,
|| generics(this.db.upcast(), this.resolver.generic_def().unwrap()),
|| this.generics(),
DebruijnIndex::INNERMOST,
)
},

View file

@ -2,21 +2,22 @@
use std::iter::repeat_with;
use chalk_ir::Mutability;
use hir_def::{
body::Body,
hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId},
path::Path,
};
use hir_expand::name::Name;
use stdx::TupleExt;
use crate::{
consteval::{try_const_usize, usize_const},
error_lifetime,
infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
lower::lower_to_chalk_mutability,
primitive::UintTy,
static_lifetime, InferenceDiagnostic, Interner, Scalar, Substitution, Ty, TyBuilder, TyExt,
TyKind,
static_lifetime, InferenceDiagnostic, Interner, Mutability, Scalar, Substitution, Ty,
TyBuilder, TyExt, TyKind,
};
/// Used to generalize patterns and assignee expressions.
@ -89,9 +90,6 @@ impl InferenceContext<'_> {
self.unify(&ty, expected);
let substs =
ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
match def {
_ if subs.is_empty() => {}
Some(def) => {
@ -108,8 +106,10 @@ impl InferenceContext<'_> {
let pre_iter = pre.iter().enumerate();
let post_iter = (post_idx_offset..).zip(post.iter());
let substs = ty.as_adt().map(TupleExt::tail);
for (i, &subpat) in pre_iter.chain(post_iter) {
let field_def = {
let expected_ty = {
match variant_data.field(&Name::new_tuple_field(i)) {
Some(local_id) => {
if !visibilities[local_id]
@ -117,17 +117,17 @@ impl InferenceContext<'_> {
{
// FIXME(DIAGNOSE): private tuple field
}
Some(local_id)
let f = field_types[local_id].clone();
let expected_ty = match substs {
Some(substs) => f.substitute(Interner, substs),
None => f.substitute(Interner, &Substitution::empty(Interner)),
};
self.normalize_associated_types_in(expected_ty)
}
None => None,
None => self.err_ty(),
}
};
let expected_ty = field_def.map_or(self.err_ty(), |f| {
field_types[f].clone().substitute(Interner, &substs)
});
let expected_ty = self.normalize_associated_types_in(expected_ty);
T::infer(self, subpat, &expected_ty, default_bm);
}
}
@ -149,7 +149,7 @@ impl InferenceContext<'_> {
expected: &Ty,
default_bm: T::BindingMode,
id: T,
subs: impl Iterator<Item = (Name, T)> + ExactSizeIterator,
subs: impl ExactSizeIterator<Item = (Name, T)>,
) -> Ty {
let (ty, def) = self.resolve_variant(path, false);
if let Some(variant) = def {
@ -158,9 +158,6 @@ impl InferenceContext<'_> {
self.unify(&ty, expected);
let substs =
ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
match def {
_ if subs.len() == 0 => {}
Some(def) => {
@ -168,8 +165,10 @@ impl InferenceContext<'_> {
let variant_data = def.variant_data(self.db.upcast());
let visibilities = self.db.field_visibilities(def);
let substs = ty.as_adt().map(TupleExt::tail);
for (name, inner) in subs {
let field_def = {
let expected_ty = {
match variant_data.field(&name) {
Some(local_id) => {
if !visibilities[local_id]
@ -180,23 +179,23 @@ impl InferenceContext<'_> {
private: true,
});
}
Some(local_id)
let f = field_types[local_id].clone();
let expected_ty = match substs {
Some(substs) => f.substitute(Interner, substs),
None => f.substitute(Interner, &Substitution::empty(Interner)),
};
self.normalize_associated_types_in(expected_ty)
}
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
field: inner.into(),
private: false,
});
None
self.err_ty()
}
}
};
let expected_ty = field_def.map_or(self.err_ty(), |f| {
field_types[f].clone().substitute(Interner, &substs)
});
let expected_ty = self.normalize_associated_types_in(expected_ty);
T::infer(self, inner, &expected_ty, default_bm);
}
}
@ -396,14 +395,14 @@ impl InferenceContext<'_> {
None => {
let inner_ty = self.table.new_type_var();
let ref_ty =
TyKind::Ref(mutability, static_lifetime(), inner_ty.clone()).intern(Interner);
TyKind::Ref(mutability, error_lifetime(), inner_ty.clone()).intern(Interner);
// Unification failure will be reported by the caller.
self.unify(&ref_ty, expected);
inner_ty
}
};
let subty = self.infer_pat(inner_pat, &expectation, default_bm);
TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
TyKind::Ref(mutability, error_lifetime(), subty).intern(Interner)
}
fn infer_bind_pat(
@ -430,7 +429,7 @@ impl InferenceContext<'_> {
let bound_ty = match mode {
BindingMode::Ref(mutability) => {
TyKind::Ref(mutability, static_lifetime(), inner_ty.clone()).intern(Interner)
TyKind::Ref(mutability, error_lifetime(), inner_ty.clone()).intern(Interner)
}
BindingMode::Move => inner_ty.clone(),
};

View file

@ -16,8 +16,8 @@ use triomphe::Arc;
use super::{InferOk, InferResult, InferenceContext, TypeError};
use crate::{
consteval::unknown_const, db::HirDatabase, fold_generic_args, fold_tys_and_consts,
static_lifetime, to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical,
consteval::unknown_const, db::HirDatabase, error_lifetime, fold_generic_args,
fold_tys_and_consts, to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical,
Const, ConstValue, DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData,
Guidance, InEnvironment, InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy,
ProjectionTyExt, Scalar, Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
@ -43,40 +43,21 @@ impl InferenceContext<'_> {
let obligations = pending_obligations
.iter()
.filter_map(|obligation| match obligation.value.value.goal.data(Interner) {
GoalData::DomainGoal(DomainGoal::Holds(
clause @ WhereClause::AliasEq(AliasEq {
alias: AliasTy::Projection(projection),
..
}),
)) => {
let projection_self = projection.self_type_parameter(self.db);
let uncanonical = chalk_ir::Substitute::apply(
&obligation.free_vars,
projection_self,
Interner,
);
if matches!(
self.resolve_ty_shallow(&uncanonical).kind(Interner),
TyKind::InferenceVar(iv, TyVariableKind::General) if *iv == root,
) {
Some(chalk_ir::Substitute::apply(
&obligation.free_vars,
clause.clone(),
Interner,
))
} else {
None
}
}
GoalData::DomainGoal(DomainGoal::Holds(
clause @ WhereClause::Implemented(trait_ref),
)) => {
let trait_ref_self = trait_ref.self_type_parameter(Interner);
let uncanonical = chalk_ir::Substitute::apply(
&obligation.free_vars,
trait_ref_self,
Interner,
);
GoalData::DomainGoal(DomainGoal::Holds(clause)) => {
let ty = match clause {
WhereClause::AliasEq(AliasEq {
alias: AliasTy::Projection(projection),
..
}) => projection.self_type_parameter(self.db),
WhereClause::Implemented(trait_ref) => {
trait_ref.self_type_parameter(Interner)
}
WhereClause::TypeOutlives(to) => to.ty.clone(),
_ => return None,
};
let uncanonical =
chalk_ir::Substitute::apply(&obligation.free_vars, ty, Interner);
if matches!(
self.resolve_ty_shallow(&uncanonical).kind(Interner),
TyKind::InferenceVar(iv, TyVariableKind::General) if *iv == root,
@ -121,8 +102,9 @@ impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
VariableKind::Ty(TyVariableKind::General) => ctx.new_type_var().cast(Interner),
VariableKind::Ty(TyVariableKind::Integer) => ctx.new_integer_var().cast(Interner),
VariableKind::Ty(TyVariableKind::Float) => ctx.new_float_var().cast(Interner),
// Chalk can sometimes return new lifetime variables. We just use the static lifetime everywhere
VariableKind::Lifetime => static_lifetime().cast(Interner),
// Chalk can sometimes return new lifetime variables. We just replace them by errors
// for now.
VariableKind::Lifetime => error_lifetime().cast(Interner),
VariableKind::Const(ty) => ctx.new_const_var(ty.clone()).cast(Interner),
}),
);
@ -1020,11 +1002,11 @@ mod resolve {
_var: InferenceVar,
_outer_binder: DebruijnIndex,
) -> Lifetime {
// fall back all lifetimes to 'static -- currently we don't deal
// fall back all lifetimes to 'error -- currently we don't deal
// with any lifetimes, but we can sometimes get some lifetime
// variables through Chalk's unification, and this at least makes
// sure we don't leak them outside of inference
crate::static_lifetime()
crate::error_lifetime()
}
}
}

View file

@ -5,42 +5,36 @@ use chalk_ir::{
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
DebruijnIndex,
};
use hir_def::{
attr::Attrs, data::adt::VariantData, visibility::Visibility, AdtId, EnumVariantId, HasModule,
ModuleId, VariantId,
};
use hir_def::{visibility::Visibility, AdtId, EnumVariantId, HasModule, ModuleId, VariantId};
use rustc_hash::FxHashSet;
use crate::{
consteval::try_const_usize, db::HirDatabase, Binders, Interner, Substitution, Ty, TyKind,
};
// FIXME: Turn this into a query, it can be quite slow
/// Checks whether a type is visibly uninhabited from a particular module.
pub(crate) fn is_ty_uninhabited_from(ty: &Ty, target_mod: ModuleId, db: &dyn HirDatabase) -> bool {
pub(crate) fn is_ty_uninhabited_from(db: &dyn HirDatabase, ty: &Ty, target_mod: ModuleId) -> bool {
let _p = tracing::span!(tracing::Level::INFO, "is_ty_uninhabited_from", ?ty).entered();
let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST);
inhabitedness == BREAK_VISIBLY_UNINHABITED
}
// FIXME: Turn this into a query, it can be quite slow
/// Checks whether a variant is visibly uninhabited from a particular module.
pub(crate) fn is_enum_variant_uninhabited_from(
db: &dyn HirDatabase,
variant: EnumVariantId,
subst: &Substitution,
target_mod: ModuleId,
db: &dyn HirDatabase,
) -> bool {
let is_local = variant.module(db.upcast()).krate() == target_mod.krate();
let _p = tracing::span!(tracing::Level::INFO, "is_enum_variant_uninhabited_from").entered();
let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
let inhabitedness = uninhabited_from.visit_variant(
variant.into(),
&db.enum_variant_data(variant).variant_data,
subst,
&db.attrs(variant.into()),
is_local,
);
let inhabitedness = uninhabited_from.visit_variant(variant.into(), subst);
inhabitedness == BREAK_VISIBLY_UNINHABITED
}
@ -98,34 +92,18 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
impl UninhabitedFrom<'_> {
fn visit_adt(&mut self, adt: AdtId, subst: &Substitution) -> ControlFlow<VisiblyUninhabited> {
let attrs = self.db.attrs(adt.into());
let adt_non_exhaustive = attrs.by_key("non_exhaustive").exists();
let is_local = adt.module(self.db.upcast()).krate() == self.target_mod.krate();
if adt_non_exhaustive && !is_local {
return CONTINUE_OPAQUELY_INHABITED;
}
// An ADT is uninhabited iff all its variants uninhabited.
match adt {
// rustc: For now, `union`s are never considered uninhabited.
AdtId::UnionId(_) => CONTINUE_OPAQUELY_INHABITED,
AdtId::StructId(s) => {
let struct_data = self.db.struct_data(s);
self.visit_variant(s.into(), &struct_data.variant_data, subst, &attrs, is_local)
}
AdtId::StructId(s) => self.visit_variant(s.into(), subst),
AdtId::EnumId(e) => {
let enum_data = self.db.enum_data(e);
for &(variant, _) in enum_data.variants.iter() {
let variant_inhabitedness = self.visit_variant(
variant.into(),
&self.db.enum_variant_data(variant).variant_data,
subst,
&self.db.attrs(variant.into()),
is_local,
);
let variant_inhabitedness = self.visit_variant(variant.into(), subst);
match variant_inhabitedness {
Break(VisiblyUninhabited) => continue,
Break(VisiblyUninhabited) => (),
Continue(()) => return CONTINUE_OPAQUELY_INHABITED,
}
}
@ -137,34 +115,36 @@ impl UninhabitedFrom<'_> {
fn visit_variant(
&mut self,
variant: VariantId,
variant_data: &VariantData,
subst: &Substitution,
attrs: &Attrs,
is_local: bool,
) -> ControlFlow<VisiblyUninhabited> {
let non_exhaustive_field_list = attrs.by_key("non_exhaustive").exists();
if non_exhaustive_field_list && !is_local {
let is_local = variant.krate(self.db.upcast()) == self.target_mod.krate();
if !is_local && self.db.attrs(variant.into()).by_key("non_exhaustive").exists() {
return CONTINUE_OPAQUELY_INHABITED;
}
let variant_data = self.db.variant_data(variant);
let fields = variant_data.fields();
if fields.is_empty() {
return CONTINUE_OPAQUELY_INHABITED;
}
let is_enum = matches!(variant, VariantId::EnumVariantId(..));
let field_tys = self.db.field_types(variant);
let field_vis = self.db.field_visibilities(variant);
let field_vis = if is_enum { None } else { Some(self.db.field_visibilities(variant)) };
for (fid, _) in variant_data.fields().iter() {
self.visit_field(field_vis[fid], &field_tys[fid], subst, is_enum)?;
for (fid, _) in fields.iter() {
self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid], subst)?;
}
CONTINUE_OPAQUELY_INHABITED
}
fn visit_field(
&mut self,
vis: Visibility,
vis: Option<Visibility>,
ty: &Binders<Ty>,
subst: &Substitution,
is_enum: bool,
) -> ControlFlow<VisiblyUninhabited> {
if is_enum || vis.is_visible_from(self.db.upcast(), self.target_mod) {
if vis.map_or(true, |it| it.is_visible_from(self.db.upcast(), self.target_mod)) {
let ty = ty.clone().substitute(Interner, subst);
ty.visit_with(self, DebruijnIndex::INNERMOST)
} else {

View file

@ -56,7 +56,6 @@ use base_db::salsa::impl_intern_value_trivial;
use chalk_ir::{
fold::{Shift, TypeFoldable},
interner::HasInterner,
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
NoSolution,
};
use either::Either;
@ -98,7 +97,9 @@ pub use traits::TraitEnvironment;
pub use utils::{all_super_traits, is_fn_unsafe_to_call};
pub use chalk_ir::{
cast::Cast, AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
cast::Cast,
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
};
pub type ForeignDefId = chalk_ir::ForeignDefId<Interner>;
@ -288,7 +289,7 @@ impl Hash for ConstScalar {
/// Return an index of a parameter in the generic type parameter list by it's id.
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
generics(db.upcast(), id.parent).param_idx(id)
generics(db.upcast(), id.parent).type_or_const_param_idx(id)
}
pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T>
@ -603,14 +604,14 @@ pub enum ImplTraitId {
}
impl_intern_value_trivial!(ImplTraitId);
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
#[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTraits {
pub(crate) impl_traits: Arena<ImplTrait>,
}
has_interner!(ImplTraits);
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
#[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTrait {
pub(crate) bounds: Binders<Vec<QuantifiedWhereClause>>,
}
@ -622,7 +623,7 @@ pub fn static_lifetime() -> Lifetime {
}
pub fn error_lifetime() -> Lifetime {
static_lifetime()
LifetimeData::Error.intern(Interner)
}
pub(crate) fn fold_free_vars<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
@ -861,7 +862,7 @@ where
if cfg!(debug_assertions) {
Err(NoSolution)
} else {
Ok(static_lifetime())
Ok(error_lifetime())
}
}
@ -873,7 +874,7 @@ where
if cfg!(debug_assertions) {
Err(NoSolution)
} else {
Ok(static_lifetime())
Ok(error_lifetime())
}
}
}

View file

@ -15,7 +15,10 @@ use base_db::{
CrateId,
};
use chalk_ir::{
cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
cast::Cast,
fold::{Shift, TypeFoldable},
interner::HasInterner,
Mutability, Safety, TypeOutlives,
};
use either::Either;
@ -59,14 +62,14 @@ use crate::{
mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk},
static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
utils::{
all_super_trait_refs, associated_type_by_name_including_super_traits, generics, Generics,
InTypeConstIdMetadata,
self, all_super_trait_refs, associated_type_by_name_including_super_traits, generics,
Generics, InTypeConstIdMetadata,
},
AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy,
FnAbi, FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime,
LifetimeData, ParamKind, PolyFnSig, ProjectionTy, QuantifiedWhereClause,
QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder,
TyKind, WhereClause,
LifetimeData, LifetimeOutlives, ParamKind, PolyFnSig, ProgramClause, ProjectionTy,
QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef,
TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
};
#[derive(Debug)]
@ -242,13 +245,8 @@ impl<'a> TyLoweringContext<'a> {
)
}
fn generics(&self) -> Generics {
generics(
self.db.upcast(),
self.resolver
.generic_def()
.expect("there should be generics if there's a generic param"),
)
fn generics(&self) -> Option<Generics> {
Some(generics(self.db.upcast(), self.resolver.generic_def()?))
}
pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) {
@ -282,7 +280,7 @@ impl<'a> TyLoweringContext<'a> {
let inner_ty = self.lower_ty(inner);
// FIXME: It should infer the eldided lifetimes instead of stubbing with static
let lifetime =
lifetime.as_ref().map_or_else(static_lifetime, |lr| self.lower_lifetime(lr));
lifetime.as_ref().map_or_else(error_lifetime, |lr| self.lower_lifetime(lr));
TyKind::Ref(lower_to_chalk_mutability(*mutability), lifetime, inner_ty)
.intern(Interner)
}
@ -318,7 +316,7 @@ impl<'a> TyLoweringContext<'a> {
// place even if we encounter more opaque types while
// lowering the bounds
let idx = opaque_type_data.borrow_mut().alloc(ImplTrait {
bounds: crate::make_single_type_binders(Vec::new()),
bounds: crate::make_single_type_binders(Vec::default()),
});
// We don't want to lower the bounds inside the binders
// we're currently in, because they don't end up inside
@ -349,8 +347,7 @@ impl<'a> TyLoweringContext<'a> {
let idx = counter.get();
// FIXME we're probably doing something wrong here
counter.set(idx + count_impl_traits(type_ref) as u16);
if let Some(def) = self.resolver.generic_def() {
let generics = generics(self.db.upcast(), def);
if let Some(generics) = self.generics() {
let param = generics
.iter()
.filter(|(_, data)| {
@ -385,8 +382,7 @@ impl<'a> TyLoweringContext<'a> {
const_params,
_impl_trait_params,
_lifetime_params,
) = if let Some(def) = self.resolver.generic_def() {
let generics = generics(self.db.upcast(), def);
) = if let Some(generics) = self.generics() {
generics.provenance_split()
} else {
(0, 0, 0, 0, 0, 0)
@ -574,44 +570,40 @@ impl<'a> TyLoweringContext<'a> {
// FIXME(trait_alias): Implement trait alias.
return (TyKind::Error.intern(Interner), None);
}
TypeNs::GenericParam(param_id) => {
let generics = generics(
self.db.upcast(),
self.resolver.generic_def().expect("generics in scope"),
);
match self.type_param_mode {
ParamLoweringMode::Placeholder => {
TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
}
ParamLoweringMode::Variable => {
let idx = match generics.param_idx(param_id.into()) {
None => {
never!("no matching generics");
return (TyKind::Error.intern(Interner), None);
}
Some(idx) => idx,
};
TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
}
TypeNs::GenericParam(param_id) => match self.type_param_mode {
ParamLoweringMode::Placeholder => {
TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
}
ParamLoweringMode::Variable => {
let idx = match self
.generics()
.expect("generics in scope")
.type_or_const_param_idx(param_id.into())
{
None => {
never!("no matching generics");
return (TyKind::Error.intern(Interner), None);
}
Some(idx) => idx,
};
TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
}
.intern(Interner)
}
.intern(Interner),
TypeNs::SelfType(impl_id) => {
let def =
self.resolver.generic_def().expect("impl should have generic param scope");
let generics = generics(self.db.upcast(), def);
let generics = self.generics().expect("impl should have generic param scope");
match self.type_param_mode {
ParamLoweringMode::Placeholder => {
// `def` can be either impl itself or item within, and we need impl itself
// now.
let generics = generics.parent_generics().unwrap_or(&generics);
let generics = generics.parent_or_self();
let subst = generics.placeholder_subst(self.db);
self.db.impl_self_ty(impl_id).substitute(Interner, &subst)
}
ParamLoweringMode::Variable => {
let starting_from = match def {
let starting_from = match generics.def() {
GenericDefId::ImplId(_) => 0,
// `def` is an item within impl. We need to substitute `BoundVar`s but
// remember that they are for parent (i.e. impl) generic params so they
@ -679,12 +671,12 @@ impl<'a> TyLoweringContext<'a> {
}
fn select_associated_type(&self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
let Some((def, res)) = self.resolver.generic_def().zip(res) else {
let Some((generics, res)) = self.generics().zip(res) else {
return TyKind::Error.intern(Interner);
};
let ty = named_associated_type_shorthand_candidates(
self.db,
def,
generics.def(),
res,
Some(segment.name.clone()),
move |name, t, associated_ty| {
@ -696,7 +688,6 @@ impl<'a> TyLoweringContext<'a> {
let parent_subst = match self.type_param_mode {
ParamLoweringMode::Placeholder => {
// if we're lowering to placeholders, we have to put them in now.
let generics = generics(self.db.upcast(), def);
let s = generics.placeholder_subst(self.db);
s.apply(parent_subst, Interner)
}
@ -718,7 +709,7 @@ impl<'a> TyLoweringContext<'a> {
None,
);
let len_self = generics(self.db.upcast(), associated_ty.into()).len_self();
let len_self = utils::generics(self.db.upcast(), associated_ty.into()).len_self();
let substs = Substitution::from_iter(
Interner,
@ -1016,40 +1007,43 @@ impl<'a> TyLoweringContext<'a> {
self.substs_from_path_segment(segment, Some(resolved.into()), false, explicit_self_ty)
}
pub(crate) fn lower_where_predicate(
&self,
where_predicate: &WherePredicate,
pub(crate) fn lower_where_predicate<'b>(
&'b self,
where_predicate: &'b WherePredicate,
ignore_bindings: bool,
) -> impl Iterator<Item = QuantifiedWhereClause> {
) -> impl Iterator<Item = QuantifiedWhereClause> + 'b {
match where_predicate {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound } => {
let self_ty = match target {
WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(type_ref),
WherePredicateTypeTarget::TypeOrConstParam(param_id) => {
let generic_def = self.resolver.generic_def().expect("generics in scope");
let generics = generics(self.db.upcast(), generic_def);
let param_id = hir_def::TypeOrConstParamId {
parent: generic_def,
local_id: *param_id,
};
let placeholder = to_placeholder_idx(self.db, param_id);
&WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
let def = self.resolver.generic_def().expect("generics in scope");
let param_id = hir_def::TypeOrConstParamId { parent: def, local_id };
match self.type_param_mode {
ParamLoweringMode::Placeholder => TyKind::Placeholder(placeholder),
ParamLoweringMode::Placeholder => {
TyKind::Placeholder(to_placeholder_idx(self.db, param_id))
}
ParamLoweringMode::Variable => {
let idx = generics.param_idx(param_id).expect("matching generics");
let idx = generics(self.db.upcast(), def)
.type_or_const_param_idx(param_id)
.expect("matching generics");
TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx))
}
}
.intern(Interner)
}
};
self.lower_type_bound(bound, self_ty, ignore_bindings)
.collect::<Vec<_>>()
.into_iter()
Either::Left(self.lower_type_bound(bound, self_ty, ignore_bindings))
}
WherePredicate::Lifetime { .. } => vec![].into_iter(),
WherePredicate::Lifetime { bound, target } => Either::Right(iter::once(
crate::wrap_empty_binders(WhereClause::LifetimeOutlives(LifetimeOutlives {
a: self.lower_lifetime(bound),
b: self.lower_lifetime(target),
})),
)),
}
.into_iter()
}
pub(crate) fn lower_type_bound(
@ -1058,11 +1052,11 @@ impl<'a> TyLoweringContext<'a> {
self_ty: Ty,
ignore_bindings: bool,
) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
let mut bindings = None;
let trait_ref = match bound.as_ref() {
let mut trait_ref = None;
let clause = match bound.as_ref() {
TypeBound::Path(path, TraitBoundModifier::None) => {
bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
bindings
trait_ref = self.lower_trait_ref_from_path(path, Some(self_ty));
trait_ref
.clone()
.filter(|tr| {
// ignore `T: Drop` or `T: Destruct` bounds.
@ -1098,14 +1092,20 @@ impl<'a> TyLoweringContext<'a> {
}
TypeBound::ForLifetime(_, path) => {
// FIXME Don't silently drop the hrtb lifetimes here
bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
bindings.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
trait_ref = self.lower_trait_ref_from_path(path, Some(self_ty));
trait_ref.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
}
TypeBound::Lifetime(l) => {
let lifetime = self.lower_lifetime(l);
Some(crate::wrap_empty_binders(WhereClause::TypeOutlives(TypeOutlives {
ty: self_ty,
lifetime,
})))
}
TypeBound::Lifetime(_) => None,
TypeBound::Error => None,
};
trait_ref.into_iter().chain(
bindings
clause.into_iter().chain(
trait_ref
.into_iter()
.filter(move |_| !ignore_bindings)
.flat_map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr)),
@ -1203,8 +1203,8 @@ impl<'a> TyLoweringContext<'a> {
});
if let Some(target_param_idx) = target_param_idx {
let mut counter = 0;
for (idx, data) in self.generics().params.type_or_consts.iter()
{
let generics = self.generics().expect("generics in scope");
for (idx, data) in generics.params.type_or_consts.iter() {
// Count the number of `impl Trait` things that appear before
// the target of our `bound`.
// Our counter within `impl_trait_mode` should be that number
@ -1264,10 +1264,19 @@ impl<'a> TyLoweringContext<'a> {
// bounds in the input.
// INVARIANT: If this function returns `DynTy`, there should be at least one trait bound.
// These invariants are utilized by `TyExt::dyn_trait()` and chalk.
let mut lifetime = None;
let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
let mut bounds: Vec<_> = bounds
.iter()
.flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false))
.filter(|b| match b.skip_binders() {
WhereClause::Implemented(_) | WhereClause::AliasEq(_) => true,
WhereClause::LifetimeOutlives(_) => false,
WhereClause::TypeOutlives(t) => {
lifetime = Some(t.lifetime.clone());
false
}
})
.collect();
let mut multiple_regular_traits = false;
@ -1305,7 +1314,7 @@ impl<'a> TyLoweringContext<'a> {
_ => unreachable!(),
}
}
// We don't produce `WhereClause::{TypeOutlives, LifetimeOutlives}` yet.
// `WhereClause::{TypeOutlives, LifetimeOutlives}` have been filtered out
_ => unreachable!(),
}
});
@ -1325,7 +1334,21 @@ impl<'a> TyLoweringContext<'a> {
if let Some(bounds) = bounds {
let bounds = crate::make_single_type_binders(bounds);
TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner)
TyKind::Dyn(DynTy {
bounds,
lifetime: match lifetime {
Some(it) => match it.bound_var(Interner) {
Some(bound_var) => LifetimeData::BoundVar(BoundVar::new(
DebruijnIndex::INNERMOST,
bound_var.index,
))
.intern(Interner),
None => it,
},
None => static_lifetime(),
},
})
.intern(Interner)
} else {
// FIXME: report error
// (additional non-auto traits, associated type rebound, or no resolved trait)
@ -1355,8 +1378,8 @@ impl<'a> TyLoweringContext<'a> {
crate::wrap_empty_binders(clause)
});
predicates.extend(sized_clause);
predicates.shrink_to_fit();
}
predicates.shrink_to_fit();
predicates
});
ImplTrait { bounds: crate::make_single_type_binders(predicates) }
@ -1371,10 +1394,7 @@ impl<'a> TyLoweringContext<'a> {
LifetimeData::Placeholder(lt_to_placeholder_idx(self.db, id))
}
ParamLoweringMode::Variable => {
let generics = generics(
self.db.upcast(),
self.resolver.generic_def().expect("generics in scope"),
);
let generics = self.generics().expect("generics in scope");
let idx = match generics.lifetime_idx(id) {
None => return error_lifetime(),
Some(idx) => idx,
@ -1485,7 +1505,7 @@ fn named_associated_type_shorthand_candidates<R>(
// Handle `Self::Type` referring to own associated type in trait definitions
if let GenericDefId::TraitId(trait_id) = param_id.parent() {
let trait_generics = generics(db.upcast(), trait_id.into());
if trait_generics.params.type_or_consts[param_id.local_id()].is_trait_self() {
if trait_generics.params[param_id.local_id()].is_trait_self() {
let def_generics = generics(db.upcast(), def);
let starting_idx = match def {
GenericDefId::TraitId(_) => 0,
@ -1604,10 +1624,14 @@ pub(crate) fn generic_predicates_for_param_query(
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let explicitly_unsized_tys = ctx.unsized_types.into_inner();
let implicitly_sized_predicates =
if let Some(implicitly_sized_predicates) =
implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &subst, &resolver)
.map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
predicates.extend(implicitly_sized_predicates);
{
predicates.extend(
implicitly_sized_predicates
.map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))),
);
}
predicates.into()
}
@ -1657,18 +1681,7 @@ pub(crate) fn trait_environment_query(
}
}
let container: Option<ItemContainerId> = match def {
// FIXME: is there a function for this?
GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
GenericDefId::AdtId(_) => None,
GenericDefId::TraitId(_) => None,
GenericDefId::TraitAliasId(_) => None,
GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
GenericDefId::ImplId(_) => None,
GenericDefId::EnumVariantId(_) => None,
GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container),
};
if let Some(ItemContainerId::TraitId(trait_id)) = container {
if let Some(trait_id) = def.assoc_trait_container(db.upcast()) {
// add `Self: Trait<T1, T2, ...>` to the environment in trait
// function default implementations (and speculative code
// inside consts or type aliases)
@ -1676,24 +1689,23 @@ pub(crate) fn trait_environment_query(
let substs = TyBuilder::placeholder_subst(db, trait_id);
let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs };
let pred = WhereClause::Implemented(trait_ref);
let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
clauses.push(program_clause.into_from_env_clause(Interner));
clauses.push(pred.cast::<ProgramClause>(Interner).into_from_env_clause(Interner));
}
let subst = generics(db.upcast(), def).placeholder_subst(db);
let explicitly_unsized_tys = ctx.unsized_types.into_inner();
let implicitly_sized_clauses =
implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver).map(|pred| {
let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
program_clause.into_from_env_clause(Interner)
});
clauses.extend(implicitly_sized_clauses);
let krate = def.module(db.upcast()).krate();
if let Some(implicitly_sized_clauses) =
implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
{
clauses.extend(
implicitly_sized_clauses
.map(|pred| pred.cast::<ProgramClause>(Interner).into_from_env_clause(Interner)),
);
}
let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
TraitEnvironment::new(krate, None, traits_in_scope.into_boxed_slice(), env)
TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env)
}
/// Resolve the where clause(s) of an item with generics.
@ -1721,10 +1733,14 @@ pub(crate) fn generic_predicates_query(
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let explicitly_unsized_tys = ctx.unsized_types.into_inner();
let implicitly_sized_predicates =
if let Some(implicitly_sized_predicates) =
implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
.map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
predicates.extend(implicitly_sized_predicates);
{
predicates.extend(
implicitly_sized_predicates
.map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))),
);
}
predicates.into()
}
@ -1736,24 +1752,24 @@ fn implicitly_sized_clauses<'a>(
explicitly_unsized_tys: &'a FxHashSet<Ty>,
substitution: &'a Substitution,
resolver: &Resolver,
) -> impl Iterator<Item = WhereClause> + 'a {
) -> Option<impl Iterator<Item = WhereClause> + 'a> {
let is_trait_def = matches!(def, GenericDefId::TraitId(..));
let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
let sized_trait = db
.lang_item(resolver.krate(), LangItem::Sized)
.and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
sized_trait.into_iter().flat_map(move |sized_trait| {
let implicitly_sized_tys = generic_args
sized_trait.map(move |sized_trait| {
generic_args
.iter()
.filter_map(|generic_arg| generic_arg.ty(Interner))
.filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty));
implicitly_sized_tys.map(move |self_ty| {
WhereClause::Implemented(TraitRef {
trait_id: sized_trait,
substitution: Substitution::from1(Interner, self_ty.clone()),
.filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty))
.map(move |self_ty| {
WhereClause::Implemented(TraitRef {
trait_id: sized_trait,
substitution: Substitution::from1(Interner, self_ty.clone()),
})
})
})
})
}
@ -1796,8 +1812,7 @@ pub(crate) fn generic_defaults_query(
make_binders(db, &generic_params, val)
}
GenericParamDataRef::LifetimeParamData(_) => {
// using static because it requires defaults
make_binders(db, &generic_params, static_lifetime().cast(Interner))
make_binders(db, &generic_params, error_lifetime().cast(Interner))
}
}
}));
@ -1817,7 +1832,7 @@ pub(crate) fn generic_defaults_recover(
let val = match id {
GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner),
GenericParamId::ConstParamId(id) => unknown_const_as_generic(db.const_param_ty(id)),
GenericParamId::LifetimeParamId(_) => static_lifetime().cast(Interner),
GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
};
crate::make_binders(db, &generic_params, val)
}));
@ -2232,7 +2247,7 @@ pub(crate) fn const_or_path_to_chalk(
expected_ty: Ty,
value: &ConstRef,
mode: ParamLoweringMode,
args: impl FnOnce() -> Generics,
args: impl FnOnce() -> Option<Generics>,
debruijn: DebruijnIndex,
) -> Const {
match value {
@ -2251,7 +2266,7 @@ pub(crate) fn const_or_path_to_chalk(
.unwrap_or_else(|| unknown_const(expected_ty))
}
&ConstRef::Complex(it) => {
let crate_data = &db.crate_graph()[owner.module(db.upcast()).krate()];
let crate_data = &db.crate_graph()[resolver.krate()];
if crate_data.env.get("__ra_is_test_fixture").is_none() && crate_data.origin.is_local()
{
// FIXME: current `InTypeConstId` is very unstable, so we only use it in non local crate

View file

@ -22,10 +22,10 @@ use triomphe::Arc;
use crate::{
autoderef::{self, AutoderefKind},
db::HirDatabase,
from_chalk_trait_id, from_foreign_def_id,
error_lifetime, from_chalk_trait_id, from_foreign_def_id,
infer::{unify::InferenceTable, Adjust, Adjustment, OverloadedDeref, PointerCast},
primitive::{FloatTy, IntTy, UintTy},
static_lifetime, to_chalk_trait_id,
to_chalk_trait_id,
utils::all_super_traits,
AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, Goal, Guidance,
InEnvironment, Interner, Scalar, Solution, Substitution, TraitEnvironment, TraitRef,
@ -1035,7 +1035,7 @@ fn iterate_method_candidates_with_autoref(
iterate_method_candidates_by_receiver(receiver_ty.clone(), maybe_reborrowed)?;
let refed = Canonical {
value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
value: TyKind::Ref(Mutability::Not, error_lifetime(), receiver_ty.value.clone())
.intern(Interner),
binders: receiver_ty.binders.clone(),
};
@ -1043,7 +1043,7 @@ fn iterate_method_candidates_with_autoref(
iterate_method_candidates_by_receiver(refed, first_adjustment.with_autoref(Mutability::Not))?;
let ref_muted = Canonical {
value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
value: TyKind::Ref(Mutability::Mut, error_lifetime(), receiver_ty.value.clone())
.intern(Interner),
binders: receiver_ty.binders,
};
@ -1369,6 +1369,7 @@ pub(crate) fn resolve_indexing_op(
None
}
// FIXME: Replace this with a `Try` impl once stable
macro_rules! check_that {
($cond:expr) => {
if !$cond {
@ -1377,6 +1378,7 @@ macro_rules! check_that {
};
}
#[derive(Debug)]
enum IsValidCandidate {
Yes,
No,

View file

@ -9,11 +9,14 @@ use hir_def::{
resolver::HasResolver,
};
use crate::mir::eval::{
name, pad16, static_lifetime, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId,
HasModule, HirDisplay, Interned, InternedClosure, Interner, Interval, IntervalAndTy,
IntervalOrOwned, ItemContainerId, LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan,
Mutability, Result, Substitution, Ty, TyBuilder, TyExt,
use crate::{
error_lifetime,
mir::eval::{
name, pad16, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, HasModule,
HirDisplay, Interned, InternedClosure, Interner, Interval, IntervalAndTy, IntervalOrOwned,
ItemContainerId, LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability,
Result, Substitution, Ty, TyBuilder, TyExt,
},
};
mod simd;
@ -247,7 +250,7 @@ impl Evaluator<'_> {
let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
let arg = IntervalAndTy {
interval: Interval { addr: tmp, size: self.ptr_size() },
ty: TyKind::Ref(Mutability::Not, static_lifetime(), ty.clone()).intern(Interner),
ty: TyKind::Ref(Mutability::Not, error_lifetime(), ty.clone()).intern(Interner),
};
let offset = layout.fields.offset(i).bytes_usize();
self.write_memory(tmp, &addr.offset(offset).to_bytes())?;

View file

@ -27,6 +27,7 @@ use crate::{
consteval::ConstEvalError,
db::{HirDatabase, InternedClosure},
display::HirDisplay,
error_lifetime,
infer::{CaptureKind, CapturedItem, TypeMismatch},
inhabitedness::is_ty_uninhabited_from,
layout::LayoutError,
@ -90,7 +91,7 @@ pub enum MirLowerError {
UnresolvedField,
UnsizedTemporary(Ty),
MissingFunctionDefinition(DefWithBodyId, ExprId),
TypeMismatch(TypeMismatch),
TypeMismatch(Option<TypeMismatch>),
/// This should never happen. Type mismatch should catch everything.
TypeError(&'static str),
NotSupported(String),
@ -170,14 +171,15 @@ impl MirLowerError {
body.pretty_print_expr(db.upcast(), *owner, *it)
)?;
}
MirLowerError::TypeMismatch(e) => {
writeln!(
MirLowerError::TypeMismatch(e) => match e {
Some(e) => writeln!(
f,
"Type mismatch: Expected {}, found {}",
e.expected.display(db),
e.actual.display(db),
)?;
}
)?,
None => writeln!(f, "Type mismatch: types mismatch with {{unknown}}",)?,
},
MirLowerError::GenericArgNotProvided(id, subst) => {
let parent = id.parent;
let param = &db.generic_params(parent).type_or_consts[id.local_id];
@ -493,9 +495,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
ty,
value: chalk_ir::ConstValue::BoundVar(BoundVar::new(
DebruijnIndex::INNERMOST,
gen.param_idx(p.into()).ok_or(MirLowerError::TypeError(
"fail to lower const generic param",
))?,
gen.type_or_const_param_idx(p.into()).ok_or(
MirLowerError::TypeError(
"fail to lower const generic param",
),
)?,
)),
}
.intern(Interner),
@ -1702,7 +1706,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn is_uninhabited(&self, expr_id: ExprId) -> bool {
is_ty_uninhabited_from(&self.infer[expr_id], self.owner.module(self.db.upcast()), self.db)
is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db.upcast()))
}
/// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and
@ -2032,10 +2036,12 @@ pub fn mir_body_for_closure_query(
let closure_local = ctx.result.locals.alloc(Local {
ty: match kind {
FnTrait::FnOnce => infer[expr].clone(),
FnTrait::FnMut => TyKind::Ref(Mutability::Mut, static_lifetime(), infer[expr].clone())
.intern(Interner),
FnTrait::Fn => TyKind::Ref(Mutability::Not, static_lifetime(), infer[expr].clone())
.intern(Interner),
FnTrait::FnMut => {
TyKind::Ref(Mutability::Mut, error_lifetime(), infer[expr].clone()).intern(Interner)
}
FnTrait::Fn => {
TyKind::Ref(Mutability::Not, error_lifetime(), infer[expr].clone()).intern(Interner)
}
},
});
ctx.result.param_locals.push(closure_local);
@ -2152,8 +2158,10 @@ pub fn lower_to_mir(
// need to take this input explicitly.
root_expr: ExprId,
) -> Result<MirBody> {
if let Some((_, it)) = infer.type_mismatches().next() {
return Err(MirLowerError::TypeMismatch(it.clone()));
if infer.has_errors {
return Err(MirLowerError::TypeMismatch(
infer.type_mismatches().next().map(|(_, it)| it.clone()),
));
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
// 0 is return local

View file

@ -290,7 +290,7 @@ impl MirLowerCtx<'_> {
Some((_, _, mutability)) => mutability,
None => Mutability::Not,
};
let result_ref = TyKind::Ref(mutability, static_lifetime(), result_ty).intern(Interner);
let result_ref = TyKind::Ref(mutability, error_lifetime(), result_ty).intern(Interner);
let mut result: Place = self.temp(result_ref, current, span)?.into();
let index_fn_op = Operand::const_zst(
TyKind::FnDef(
@ -333,8 +333,8 @@ impl MirLowerCtx<'_> {
BorrowKind::Mut { kind: MutBorrowKind::Default },
)
};
let ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), source_ty.clone()).intern(Interner);
let target_ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), target_ty).intern(Interner);
let ty_ref = TyKind::Ref(chalk_mut, error_lifetime(), source_ty.clone()).intern(Interner);
let target_ty_ref = TyKind::Ref(chalk_mut, error_lifetime(), target_ty).intern(Interner);
let ref_place: Place = self.temp(ty_ref, current, span)?.into();
self.push_assignment(current, ref_place, Rvalue::Ref(borrow_kind, place), span);
let deref_trait = self

View file

@ -101,7 +101,7 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
_outer_binder: DebruijnIndex,
) -> std::result::Result<chalk_ir::Const<Interner>, Self::Error> {
let it = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
let Some(idx) = self.generics.as_ref().and_then(|g| g.type_or_const_param_idx(it)) else {
not_supported!("missing idx in generics");
};
Ok(self
@ -119,7 +119,7 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
_outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let it = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
let Some(idx) = self.generics.as_ref().and_then(|g| g.type_or_const_param_idx(it)) else {
not_supported!("missing idx in generics");
};
Ok(self

View file

@ -136,3 +136,20 @@ impl Trait for () {
"#,
);
}
#[test]
fn no_mismatches_with_unresolved_projections() {
check_no_mismatches(
r#"
// `Thing` is `{unknown}`
fn create() -> Option<(i32, Thing)> {
Some((69420, Thing))
}
fn consume() -> Option<()> {
let (number, thing) = create()?;
Some(())
}
"#,
);
}

View file

@ -85,7 +85,7 @@ fn render_dyn_for_ty() {
trait Foo<'a> {}
fn foo(foo: &dyn for<'a> Foo<'a>) {}
// ^^^ &dyn Foo<'static>
// ^^^ &dyn Foo<'_>
"#,
);
}

View file

@ -1109,7 +1109,7 @@ fn var_args() {
#[lang = "va_list"]
pub struct VaListImpl<'f>;
fn my_fn(foo: ...) {}
//^^^ VaListImpl<'static>
//^^^ VaListImpl<'{error}>
"#,
);
}

View file

@ -896,13 +896,13 @@ fn flush(&self) {
"#,
expect![[r#"
123..127 'self': &Mutex<T>
150..152 '{}': MutexGuard<'static, T>
150..152 '{}': MutexGuard<'{error}, T>
234..238 'self': &{unknown}
240..290 '{ ...()); }': ()
250..251 'w': &Mutex<BufWriter>
276..287 '*(w.lock())': BufWriter
278..279 'w': &Mutex<BufWriter>
278..286 'w.lock()': MutexGuard<'static, BufWriter>
278..286 'w.lock()': MutexGuard<'{error}, BufWriter>
"#]],
);
}

View file

@ -3092,7 +3092,7 @@ fn main() {
389..394 'boxed': Box<Foo<i32>>
389..406 'boxed....nner()': &i32
416..421 'good1': &i32
424..438 'Foo::get_inner': fn get_inner<i32, 'static>(&Box<Foo<i32>>) -> &i32
424..438 'Foo::get_inner': fn get_inner<i32, '{error}>(&Box<Foo<i32>>) -> &i32
424..446 'Foo::g...boxed)': &i32
439..445 '&boxed': &Box<Foo<i32>>
440..445 'boxed': Box<Foo<i32>>
@ -3100,7 +3100,7 @@ fn main() {
464..469 'boxed': Box<Foo<i32>>
464..480 'boxed....self()': &Foo<i32>
490..495 'good2': &Foo<i32>
498..511 'Foo::get_self': fn get_self<i32, 'static>(&Box<Foo<i32>>) -> &Foo<i32>
498..511 'Foo::get_self': fn get_self<i32, '{error}>(&Box<Foo<i32>>) -> &Foo<i32>
498..519 'Foo::g...boxed)': &Foo<i32>
512..518 '&boxed': &Box<Foo<i32>>
513..518 'boxed': Box<Foo<i32>>
@ -3659,7 +3659,7 @@ fn main() {
let are = "are";
let count = 10;
builtin#format_args("hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: Arguments<'static>
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: Arguments<'{error}>
}
"#,
);

View file

@ -1602,7 +1602,7 @@ fn weird_bounds() {
r#"
//- minicore: sized
trait Trait {}
fn test(
fn test<'lifetime>(
a: impl Trait + 'lifetime,
b: impl 'lifetime,
c: impl (Trait),
@ -1612,13 +1612,13 @@ fn test(
) {}
"#,
expect![[r#"
28..29 'a': impl Trait
59..60 'b': impl Sized
82..83 'c': impl Trait
103..104 'd': impl Sized
128..129 'e': impl ?Sized
148..149 'f': impl Trait + ?Sized
173..175 '{}': ()
39..40 'a': impl Trait + 'lifetime
70..71 'b': impl 'lifetime
93..94 'c': impl Trait
114..115 'd': impl 'lifetime
139..140 'e': impl ?Sized
159..160 'f': impl Trait + ?Sized
184..186 '{}': ()
"#]],
);
}

View file

@ -262,7 +262,7 @@ impl<'a> ClosureSubst<'a> {
}
}
#[derive(Debug)]
#[derive(Clone, Debug)]
pub(crate) struct Generics {
def: GenericDefId,
pub(crate) params: Interned<GenericParams>,
@ -274,6 +274,10 @@ impl Generics {
self.iter().map(|(id, _)| id)
}
pub(crate) fn def(&self) -> GenericDefId {
self.def
}
/// Iterator over types and const params of self, then parent.
pub(crate) fn iter<'a>(
&'a self,
@ -304,7 +308,11 @@ impl Generics {
};
let lt_iter = self.params.iter_lt().map(from_lt_id(self));
self.params.iter().map(from_toc_id(self)).chain(lt_iter).chain(self.iter_parent())
self.params
.iter_type_or_consts()
.map(from_toc_id(self))
.chain(lt_iter)
.chain(self.iter_parent())
}
/// Iterate over types and const params without parent params.
@ -336,16 +344,19 @@ impl Generics {
}
};
self.params.iter().map(from_toc_id(self)).chain(self.params.iter_lt().map(from_lt_id(self)))
self.params
.iter_type_or_consts()
.map(from_toc_id(self))
.chain(self.params.iter_lt().map(from_lt_id(self)))
}
/// Iterator over types and const params of parent.
#[allow(clippy::needless_lifetimes)]
pub(crate) fn iter_parent<'a>(
&'a self,
) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'a>)> + 'a {
pub(crate) fn iter_parent(
&self,
) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
self.parent_generics().into_iter().flat_map(|it| {
let from_toc_id = move |(local_id, p): (_, &'a TypeOrConstParamData)| {
let from_toc_id = move |(local_id, p)| {
let p: &_ = p;
let id = TypeOrConstParamId { parent: it.def, local_id };
match p {
TypeOrConstParamData::TypeParamData(p) => (
@ -359,14 +370,14 @@ impl Generics {
}
};
let from_lt_id = move |(local_id, p): (_, &'a LifetimeParamData)| {
let from_lt_id = move |(local_id, p): (_, _)| {
(
GenericParamId::LifetimeParamId(LifetimeParamId { parent: it.def, local_id }),
GenericParamDataRef::LifetimeParamData(p),
)
};
let lt_iter = it.params.iter_lt().map(from_lt_id);
it.params.iter().map(from_toc_id).chain(lt_iter)
it.params.iter_type_or_consts().map(from_toc_id).chain(lt_iter)
})
}
@ -383,7 +394,7 @@ impl Generics {
}
/// Returns number of generic parameter excluding those from parent
fn len_params(&self) -> usize {
fn len_type_and_const_params(&self) -> usize {
self.params.type_or_consts.len()
}
@ -394,7 +405,7 @@ impl Generics {
let mut impl_trait_params = 0;
let mut const_params = 0;
let mut lifetime_params = 0;
self.params.iter().for_each(|(_, data)| match data {
self.params.iter_type_or_consts().for_each(|(_, data)| match data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList => type_params += 1,
TypeParamProvenance::TraitSelf => self_params += 1,
@ -409,18 +420,23 @@ impl Generics {
(parent_len, self_params, type_params, const_params, impl_trait_params, lifetime_params)
}
pub(crate) fn param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
Some(self.find_param(param)?.0)
pub(crate) fn type_or_const_param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
Some(self.find_type_or_const_param(param)?.0)
}
fn find_param(&self, param: TypeOrConstParamId) -> Option<(usize, &TypeOrConstParamData)> {
fn find_type_or_const_param(
&self,
param: TypeOrConstParamId,
) -> Option<(usize, &TypeOrConstParamData)> {
if param.parent == self.def {
let (idx, (_local_id, data)) =
self.params.iter().enumerate().find(|(_, (idx, _))| *idx == param.local_id)?;
Some((idx, data))
let idx = param.local_id.into_raw().into_u32() as usize;
if idx >= self.params.type_or_consts.len() {
return None;
}
Some((idx, &self.params.type_or_consts[param.local_id]))
} else {
self.parent_generics()
.and_then(|g| g.find_param(param))
.and_then(|g| g.find_type_or_const_param(param))
// Remember that parent parameters come after parameters for self.
.map(|(idx, data)| (self.len_self() + idx, data))
}
@ -432,13 +448,14 @@ impl Generics {
fn find_lifetime(&self, lifetime: LifetimeParamId) -> Option<(usize, &LifetimeParamData)> {
if lifetime.parent == self.def {
let (idx, (_local_id, data)) = self
.params
.iter_lt()
.enumerate()
.find(|(_, (idx, _))| *idx == lifetime.local_id)?;
Some((self.len_params() + idx, data))
let idx = lifetime.local_id.into_raw().into_u32() as usize;
if idx >= self.params.lifetimes.len() {
return None;
}
Some((
self.len_type_and_const_params() + idx,
&self.params.lifetimes[lifetime.local_id],
))
} else {
self.parent_generics()
.and_then(|g| g.find_lifetime(lifetime))
@ -450,6 +467,10 @@ impl Generics {
self.parent_generics.as_deref()
}
pub(crate) fn parent_or_self(&self) -> &Generics {
self.parent_generics.as_deref().unwrap_or(self)
}
/// Returns a Substitution that replaces each parameter by a bound variable.
pub(crate) fn bound_vars_subst(
&self,

View file

@ -33,7 +33,7 @@ tt.workspace = true
span.workspace = true
[features]
in-rust-tree = []
in-rust-tree = ["hir-expand/in-rust-tree"]
[lints]
workspace = true

View file

@ -4,24 +4,35 @@
//!
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::{
AttrsQuery, BlockDefMapQuery, BodyQuery, BodyWithSourceMapQuery, ConstDataQuery,
ConstVisibilityQuery, CrateLangItemsQuery, CrateSupportsNoStdQuery, DefDatabase,
DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery, ExprScopesQuery,
ExternCrateDeclDataQuery, FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery,
FileItemTreeQuery, FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery,
ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery,
InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery,
InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery,
InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery,
InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery,
InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery,
MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataWithDiagnosticsQuery,
TraitAliasDataQuery, TraitDataWithDiagnosticsQuery, TypeAliasDataQuery,
UnionDataWithDiagnosticsQuery,
AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BodyQuery, BodyWithSourceMapQuery,
ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery, CrateLangItemsQuery,
CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage,
EnumDataQuery, EnumVariantDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery,
FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery,
FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataWithDiagnosticsQuery,
ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery, InternDatabase,
InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery, InternExternCrateQuery,
InternFunctionQuery, InternImplQuery, InternInTypeConstQuery, InternMacro2Query,
InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery, InternStructQuery,
InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery, InternUnionQuery,
InternUseQuery, LangItemQuery, Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery,
StaticDataQuery, StructDataWithDiagnosticsQuery, TraitAliasDataQuery,
TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataWithDiagnosticsQuery,
};
pub use hir_expand::db::{
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacrosQuery, RealSpanMapQuery,
};
pub use hir_ty::db::*;
pub use hir_ty::db::{
AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery,
CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery,
ConstParamTyQuery, FieldTypesQuery, FnDefDatumQuery, FnDefVarianceQuery, GenericDefaultsQuery,
GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase, HirDatabaseStorage,
ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery,
InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery,
InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery,
InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, MirBodyQuery, ProgramClausesForChalkEnvQuery,
ReturnTypeImplTraitsQuery, TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery,
TraitImplsInBlockQuery, TraitImplsInCrateQuery, TraitImplsInDepsQuery, TyQuery, ValueTyQuery,
};

View file

@ -1,4 +1,5 @@
//! HirDisplay implementations for various hir types.
use either::Either;
use hir_def::{
data::adt::{StructKind, VariantData},
generics::{
@ -13,7 +14,7 @@ use hir_ty::{
write_bounds_like_dyn_trait_with_prefix, write_visibility, HirDisplay, HirDisplayError,
HirFormatter, SizedByDefault,
},
Interner, TraitRefExt, WhereClause,
AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyKind, WhereClause,
};
use crate::{
@ -363,16 +364,52 @@ impl HirDisplay for TypeOrConstParam {
impl HirDisplay for TypeParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
let params = f.db.generic_params(self.id.parent());
let param_data = &params.type_or_consts[self.id.local_id()];
let substs = TyBuilder::placeholder_subst(f.db, self.id.parent());
let krate = self.id.parent().krate(f.db).id;
let ty =
TyKind::Placeholder(hir_ty::to_placeholder_idx(f.db, self.id.into())).intern(Interner);
let predicates = f.db.generic_predicates(self.id.parent());
let predicates = predicates
.iter()
.cloned()
.map(|pred| pred.substitute(Interner, &substs))
.filter(|wc| match wc.skip_binders() {
WhereClause::Implemented(tr) => tr.self_type_parameter(Interner) == ty,
WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(proj), ty: _ }) => {
proj.self_type_parameter(f.db) == ty
}
WhereClause::AliasEq(_) => false,
WhereClause::TypeOutlives(to) => to.ty == ty,
WhereClause::LifetimeOutlives(_) => false,
})
.collect::<Vec<_>>();
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast()))?
}
TypeParamProvenance::ArgumentImplTrait => {
return write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
Either::Left(&ty),
&predicates,
SizedByDefault::Sized { anchor: krate },
);
}
},
TypeOrConstParamData::ConstParamData(p) => {
write!(f, "{}", p.name.display(f.db.upcast()))?;
}
}
if f.omit_verbose_types() {
return Ok(());
}
let bounds = f.db.generic_predicates_for_param(self.id.parent(), self.id.into(), None);
let substs = TyBuilder::placeholder_subst(f.db, self.id.parent());
let predicates: Vec<_> =
bounds.iter().cloned().map(|b| b.substitute(Interner, &substs)).collect();
let krate = self.id.parent().krate(f.db).id;
let sized_trait =
f.db.lang_item(krate, LangItem::Sized).and_then(|lang_item| lang_item.as_trait());
let has_only_sized_bound = predicates.iter().all(move |pred| match pred.skip_binders() {
@ -382,7 +419,16 @@ impl HirDisplay for TypeParam {
let has_only_not_sized_bound = predicates.is_empty();
if !has_only_sized_bound || has_only_not_sized_bound {
let default_sized = SizedByDefault::Sized { anchor: krate };
write_bounds_like_dyn_trait_with_prefix(f, ":", &predicates, default_sized)?;
write_bounds_like_dyn_trait_with_prefix(
f,
":",
Either::Left(
&hir_ty::TyKind::Placeholder(hir_ty::to_placeholder_idx(f.db, self.id.into()))
.intern(Interner),
),
&predicates,
default_sized,
)?;
}
Ok(())
}

View file

@ -239,7 +239,7 @@ impl Crate {
db: &dyn DefDatabase,
query: import_map::Query,
) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
let _p = tracing::span!(tracing::Level::INFO, "query_external_importables");
let _p = tracing::span!(tracing::Level::INFO, "query_external_importables").entered();
import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| {
match ItemInNs::from(item) {
ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
@ -260,11 +260,11 @@ impl Crate {
doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
}
pub fn cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
pub fn cfg(&self, db: &dyn HirDatabase) -> Arc<CfgOptions> {
db.crate_graph()[self.id].cfg_options.clone()
}
pub fn potential_cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
pub fn potential_cfg(&self, db: &dyn HirDatabase) -> Arc<CfgOptions> {
let data = &db.crate_graph()[self.id];
data.potential_cfg_options.clone().unwrap_or_else(|| data.cfg_options.clone())
}
@ -548,8 +548,8 @@ impl Module {
acc: &mut Vec<AnyDiagnostic>,
style_lints: bool,
) {
let name = self.name(db);
let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", ?name);
let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", name = ?self.name(db))
.entered();
let def_map = self.id.def_map(db.upcast());
for diag in def_map.diagnostics() {
if diag.in_module != self.id.local_id {
@ -653,7 +653,7 @@ impl Module {
GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
});
let type_params = generic_params
.iter()
.iter_type_or_consts()
.filter(|(_, it)| it.type_param().is_some())
.map(|(local_id, _)| {
GenericParamId::TypeParamId(TypeParamId::from_unchecked(
@ -684,7 +684,7 @@ impl Module {
let items = &db.trait_data(trait_.into()).items;
let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
AssocItemId::FunctionId(it) => !db.function_data(it).has_body(),
AssocItemId::ConstId(id) => Const::from(id).value(db).is_none(),
AssocItemId::ConstId(id) => !db.const_data(id).has_body,
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
});
impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().filter_map(
@ -1418,16 +1418,14 @@ impl Adt {
}
pub fn layout(self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
if !db.generic_params(self.into()).is_empty() {
return Err(LayoutError::HasPlaceholder);
}
let krate = self.krate(db).id;
db.layout_of_adt(
self.into(),
Substitution::empty(Interner),
TyBuilder::adt(db, self.into())
.fill_with_defaults(db, || TyKind::Error.intern(Interner))
.build_into_subst(),
db.trait_environment(self.into()),
)
.map(|layout| Layout(layout, db.target_data_layout(krate).unwrap()))
.map(|layout| Layout(layout, db.target_data_layout(self.krate(db).id).unwrap()))
}
/// Turns this ADT into a type. Any type parameters of the ADT will be
@ -1630,7 +1628,6 @@ impl DefWithBody {
acc: &mut Vec<AnyDiagnostic>,
style_lints: bool,
) {
db.unwind_if_cancelled();
let krate = self.module(db).id.krate();
let (body, source_map) = db.body_with_source_map(self.into());
@ -1678,6 +1675,7 @@ impl DefWithBody {
for d in &infer.diagnostics {
acc.extend(AnyDiagnostic::inference_diagnostic(db, self.into(), d, &source_map));
}
for (pat_or_expr, mismatch) in infer.type_mismatches() {
let expr_or_pat = match pat_or_expr {
ExprOrPatId::ExprId(expr) => source_map.expr_syntax(expr).map(Either::Left),
@ -1763,7 +1761,9 @@ impl DefWithBody {
need_mut = &mir::MutabilityReason::Not;
}
let local = Local { parent: self.into(), binding_id };
match (need_mut, local.is_mut(db)) {
let is_mut = body[binding_id].mode == BindingAnnotation::Mutable;
match (need_mut, is_mut) {
(mir::MutabilityReason::Unused, _) => {
let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with('_'));
if !should_ignore {
@ -2007,12 +2007,15 @@ impl Function {
/// is this a `fn main` or a function with an `export_name` of `main`?
pub fn is_main(self, db: &dyn HirDatabase) -> bool {
if !self.module(db).is_crate_root() {
return false;
}
let data = db.function_data(self.id);
data.attrs.export_name() == Some("main")
|| self.module(db).is_crate_root() && data.name.to_smol_str() == "main"
}
data.name.to_smol_str() == "main" || data.attrs.export_name() == Some("main")
/// Is this a function with an `export_name` of `main`?
pub fn exported_main(self, db: &dyn HirDatabase) -> bool {
let data = db.function_data(self.id);
data.attrs.export_name() == Some("main")
}
/// Does this function have the ignore attribute?
@ -3909,7 +3912,7 @@ impl Type {
inner.derived(
TyKind::Ref(
if m.is_mut() { hir_ty::Mutability::Mut } else { hir_ty::Mutability::Not },
hir_ty::static_lifetime(),
hir_ty::error_lifetime(),
inner.ty.clone(),
)
.intern(Interner),
@ -4492,7 +4495,7 @@ impl Type {
name: Option<&Name>,
mut callback: impl FnMut(Function) -> Option<T>,
) -> Option<T> {
let _p = tracing::span!(tracing::Level::INFO, "iterate_method_candidates");
let _p = tracing::span!(tracing::Level::INFO, "iterate_method_candidates").entered();
let mut slot = None;
self.iterate_method_candidates_dyn(
@ -4580,7 +4583,7 @@ impl Type {
name: Option<&Name>,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
let _p = tracing::span!(tracing::Level::INFO, "iterate_path_candidates");
let _p = tracing::span!(tracing::Level::INFO, "iterate_path_candidates").entered();
let mut slot = None;
self.iterate_path_candidates_dyn(
db,
@ -4647,7 +4650,7 @@ impl Type {
&'a self,
db: &'a dyn HirDatabase,
) -> impl Iterator<Item = Trait> + 'a {
let _p = tracing::span!(tracing::Level::INFO, "applicable_inherent_traits");
let _p = tracing::span!(tracing::Level::INFO, "applicable_inherent_traits").entered();
self.autoderef_(db)
.filter_map(|ty| ty.dyn_trait())
.flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
@ -4655,7 +4658,7 @@ impl Type {
}
pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
let _p = tracing::span!(tracing::Level::INFO, "env_traits");
let _p = tracing::span!(tracing::Level::INFO, "env_traits").entered();
self.autoderef_(db)
.filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
.flat_map(|ty| {
@ -4709,10 +4712,12 @@ impl Type {
if let WhereClause::Implemented(trait_ref) = pred.skip_binders() {
cb(type_.clone());
// skip the self type. it's likely the type we just got the bounds from
for ty in
trait_ref.substitution.iter(Interner).skip(1).filter_map(|a| a.ty(Interner))
{
walk_type(db, &type_.derived(ty.clone()), cb);
if let [self_ty, params @ ..] = trait_ref.substitution.as_slice(Interner) {
for ty in
params.iter().filter(|&ty| ty != self_ty).filter_map(|a| a.ty(Interner))
{
walk_type(db, &type_.derived(ty.clone()), cb);
}
}
}
}

View file

@ -722,7 +722,7 @@ impl<'db> SemanticsImpl<'db> {
mut token: SyntaxToken,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros");
let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros").entered();
let (sa, span, file_id) =
match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(sa) => match sa.file_id.file_id() {
@ -1246,6 +1246,17 @@ impl<'db> SemanticsImpl<'db> {
.map_or(false, |m| matches!(m.id, MacroId::ProcMacroId(..)))
}
pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32> {
let sa = self.analyze(macro_call.syntax())?;
self.db
.parse_macro_expansion(
sa.expand(self.db, self.wrap_node_infile(macro_call.clone()).as_ref())?,
)
.value
.1
.matched_arm
}
pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
let sa = match self.analyze(macro_call.syntax()) {
Some(it) => it,
@ -1359,7 +1370,7 @@ impl<'db> SemanticsImpl<'db> {
offset: Option<TextSize>,
infer_body: bool,
) -> Option<SourceAnalyzer> {
let _p = tracing::span!(tracing::Level::INFO, "Semantics::analyze_impl");
let _p = tracing::span!(tracing::Level::INFO, "Semantics::analyze_impl").entered();
let node = self.find_file(node);
let container = self.with_ctx(|ctx| ctx.find_container(node))?;

View file

@ -118,7 +118,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> {
impl SourceToDefCtx<'_, '_> {
pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> {
let _p = tracing::span!(tracing::Level::INFO, "SourceBinder::file_to_module_def");
let _p = tracing::span!(tracing::Level::INFO, "SourceBinder::file_to_module_def").entered();
let mut mods = SmallVec::new();
for &crate_id in self.db.relevant_crates(file).iter() {
// FIXME: inner items
@ -133,7 +133,7 @@ impl SourceToDefCtx<'_, '_> {
}
pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> {
let _p = tracing::span!(tracing::Level::INFO, "module_to_def");
let _p = tracing::span!(tracing::Level::INFO, "module_to_def").entered();
let parent_declaration = src
.syntax()
.ancestors_with_macros_skip_attr_item(self.db.upcast())
@ -158,7 +158,7 @@ impl SourceToDefCtx<'_, '_> {
}
pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def");
let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def").entered();
let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied()
}

View file

@ -105,7 +105,7 @@ fn add_missing_impl_members_inner(
assist_id: &'static str,
label: &'static str,
) -> Option<()> {
let _p = tracing::span!(tracing::Level::INFO, "add_missing_impl_members_inner");
let _p = tracing::span!(tracing::Level::INFO, "add_missing_impl_members_inner").entered();
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
let impl_ = ctx.sema.to_def(&impl_def)?;

View file

@ -1588,4 +1588,82 @@ mod bar {
"#,
);
}
#[test]
fn local_inline_import_has_alias() {
// FIXME
check_assist_not_applicable(
auto_import,
r#"
struct S<T>(T);
use S as IoResult;
mod foo {
pub fn bar() -> S$0<()> {}
}
"#,
);
}
#[test]
fn alias_local() {
// FIXME
check_assist_not_applicable(
auto_import,
r#"
struct S<T>(T);
use S as IoResult;
mod foo {
pub fn bar() -> IoResult$0<()> {}
}
"#,
);
}
#[test]
fn preserve_raw_identifiers_strict() {
check_assist(
auto_import,
r"
r#as$0
pub mod ffi_mod {
pub fn r#as() {};
}
",
r"
use ffi_mod::r#as;
r#as
pub mod ffi_mod {
pub fn r#as() {};
}
",
);
}
#[test]
fn preserve_raw_identifiers_reserved() {
check_assist(
auto_import,
r"
r#abstract$0
pub mod ffi_mod {
pub fn r#abstract() {};
}
",
r"
use ffi_mod::r#abstract;
r#abstract
pub mod ffi_mod {
pub fn r#abstract() {};
}
",
);
}
}

View file

@ -0,0 +1,250 @@
use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait};
use itertools::Itertools;
use syntax::{
ast::{self, make, AstNode, HasName},
ted,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
// Assist: convert_from_to_tryfrom
//
// Converts a From impl to a TryFrom impl, wrapping returns in `Ok`.
//
// ```
// # //- minicore: from
// impl $0From<usize> for Thing {
// fn from(val: usize) -> Self {
// Thing {
// b: val.to_string(),
// a: val
// }
// }
// }
// ```
// ->
// ```
// impl TryFrom<usize> for Thing {
// type Error = ${0:()};
//
// fn try_from(val: usize) -> Result<Self, Self::Error> {
// Ok(Thing {
// b: val.to_string(),
// a: val
// })
// }
// }
// ```
// Assist entry point: offered when the cursor is on an `impl From<X> for Y`
// whose trait resolves to the real `core::convert::From`.
pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    let impl_ = ctx.find_node_at_offset::<ast::Impl>()?;
    let trait_ty = impl_.trait_()?;
    let module = ctx.sema.scope(impl_.syntax())?.module();

    // The source type `X` of `From<X>`: first generic argument of the trait path.
    let from_type = match &trait_ty {
        ast::Type::PathType(path) => {
            path.path()?.segment()?.generic_arg_list()?.generic_args().next()?
        }
        _ => return None,
    };

    let associated_items = impl_.assoc_item_list()?;
    // Locate the `fn from(...)` item inside the impl.
    let from_fn = associated_items.assoc_items().find_map(|item| {
        if let ast::AssocItem::Fn(f) = item {
            if f.name()?.text() == "from" {
                return Some(f);
            }
        };
        None
    })?;

    let from_fn_name = from_fn.name()?;
    let from_fn_return_type = from_fn.ret_type()?.ty()?;
    // Every `return` expression and the tail expression must be wrapped in `Ok(..)`.
    let return_exprs = from_fn.body()?.syntax().descendants().filter_map(ast::ReturnExpr::cast);
    let tail_expr = from_fn.body()?.tail_expr()?;

    // Only fire on `core::convert::From`, not on an unrelated trait named `From`.
    if resolve_target_trait(&ctx.sema, &impl_)?
        != FamousDefs(&ctx.sema, module.krate()).core_convert_From()?
    {
        return None;
    }

    acc.add(
        AssistId("convert_from_to_tryfrom", AssistKind::RefactorRewrite),
        "Convert From to TryFrom",
        impl_.syntax().text_range(),
        |builder| {
            // Re-anchor every node in the mutable copy of the tree before editing.
            let trait_ty = builder.make_mut(trait_ty);
            let from_fn_return_type = builder.make_mut(from_fn_return_type);
            let from_fn_name = builder.make_mut(from_fn_name);
            let tail_expr = builder.make_mut(tail_expr);
            let return_exprs = return_exprs.map(|r| builder.make_mut(r)).collect_vec();
            let associated_items = builder.make_mut(associated_items).clone();

            // `From<X>` -> `TryFrom<X>`.
            ted::replace(
                trait_ty.syntax(),
                make::ty(&format!("TryFrom<{from_type}>")).syntax().clone_for_update(),
            );
            // `-> Self` -> `-> Result<Self, Self::Error>`.
            ted::replace(
                from_fn_return_type.syntax(),
                make::ty("Result<Self, Self::Error>").syntax().clone_for_update(),
            );
            // `fn from` -> `fn try_from`.
            ted::replace(from_fn_name.syntax(), make::name("try_from").syntax().clone_for_update());
            ted::replace(
                tail_expr.syntax(),
                wrap_ok(tail_expr.clone()).syntax().clone_for_update(),
            );

            // Wrap each `return` value in `Ok` (falls back to a unit expression
            // when the `return` carries no value).
            for r in return_exprs {
                let t = r.expr().unwrap_or_else(make::expr_unit);
                ted::replace(t.syntax(), wrap_ok(t.clone()).syntax().clone_for_update());
            }

            // Build `type Error = ();` to insert at the top of the impl.
            let error_type = ast::AssocItem::TypeAlias(make::ty_alias(
                "Error",
                None,
                None,
                None,
                Some((make::ty_unit(), None)),
            ))
            .clone_for_update();

            // Turn the `()` in the alias into a snippet placeholder so the user
            // can immediately type the real error type after applying the assist.
            if let Some(cap) = ctx.config.snippet_cap {
                if let ast::AssocItem::TypeAlias(type_alias) = &error_type {
                    if let Some(ty) = type_alias.ty() {
                        builder.add_placeholder_snippet(cap, ty);
                    }
                }
            }

            associated_items.add_item_at_start(error_type);
        },
    )
}
/// Builds the call expression `Ok(expr)` around the given expression.
fn wrap_ok(expr: ast::Expr) -> ast::Expr {
    let ok_path = make::expr_path(make::ext::ident_path("Ok"));
    let args = make::arg_list(std::iter::once(expr));
    make::expr_call(ok_path, args)
}
#[cfg(test)]
mod tests {
    use super::*;

    use crate::tests::{check_assist, check_assist_not_applicable};

    // Basic case: both the explicit `return` and the tail expression get `Ok`-wrapped.
    #[test]
    fn converts_from_to_tryfrom() {
        check_assist(
            convert_from_to_tryfrom,
            r#"
//- minicore: from
struct Foo(String);

impl $0From<String> for Foo {
    fn from(val: String) -> Self {
        if val == "bar" {
            return Foo(val);
        }
        Self(val)
    }
}
"#,
            r#"
struct Foo(String);

impl TryFrom<String> for Foo {
    type Error = ${0:()};

    fn try_from(val: String) -> Result<Self, Self::Error> {
        if val == "bar" {
            return Ok(Foo(val));
        }
        Ok(Self(val))
    }
}
"#,
        );
    }

    // A generic source type (`Option<String>`) must survive the trait rewrite,
    // and a `match` tail expression is wrapped as a whole.
    #[test]
    fn converts_from_to_tryfrom_nested_type() {
        check_assist(
            convert_from_to_tryfrom,
            r#"
//- minicore: from
struct Foo(String);

impl $0From<Option<String>> for Foo {
    fn from(val: Option<String>) -> Self {
        match val {
            Some(val) => Foo(val),
            None => Foo("".to_string())
        }
    }
}
"#,
            r#"
struct Foo(String);

impl TryFrom<Option<String>> for Foo {
    type Error = ${0:()};

    fn try_from(val: Option<String>) -> Result<Self, Self::Error> {
        Ok(match val {
            Some(val) => Foo(val),
            None => Foo("".to_string())
        })
    }
}
"#,
        );
    }

    // Lifetimes on the impl and the source type must be carried over untouched.
    #[test]
    fn converts_from_to_tryfrom_preserves_lifetimes() {
        check_assist(
            convert_from_to_tryfrom,
            r#"
//- minicore: from
struct Foo<'a>(&'a str);

impl<'a> $0From<&'a str> for Foo<'a> {
    fn from(val: &'a str) -> Self {
        Self(val)
    }
}
"#,
            r#"
struct Foo<'a>(&'a str);

impl<'a> TryFrom<&'a str> for Foo<'a> {
    type Error = ${0:()};

    fn try_from(val: &'a str) -> Result<Self, Self::Error> {
        Ok(Self(val))
    }
}
"#,
        );
    }

    // The assist must not fire on impls of traits other than `core::convert::From`.
    #[test]
    fn other_trait_not_applicable() {
        check_assist_not_applicable(
            convert_from_to_tryfrom,
            r#"
struct Foo(String);

impl $0TryFrom<String> for Foo {
    fn try_from(val: String) -> Result<Self, Self::Error> {
        Ok(Self(val))
    }
}
"#,
        );
    }
}

View file

@ -1149,8 +1149,14 @@ fn reference_is_exclusive(
node: &dyn HasTokenAtOffset,
ctx: &AssistContext<'_>,
) -> bool {
// FIXME: this quite an incorrect way to go about doing this :-)
// `FileReference` is an IDE-type --- it encapsulates data communicated to the human,
// but doesn't necessary fully reflect all the intricacies of the underlying language semantics
// The correct approach here would be to expose this entire analysis as a method on some hir
// type. Something like `body.free_variables(statement_range)`.
// we directly modify variable with set: `n = 0`, `n += 1`
if reference.category == Some(ReferenceCategory::Write) {
if reference.category.contains(ReferenceCategory::WRITE) {
return true;
}
@ -5617,7 +5623,7 @@ fn func<T: Debug>(i: Struct<'_, T>) {
fun_name(i);
}
fn $0fun_name(i: Struct<'static, T>) {
fn $0fun_name(i: Struct<'_, T>) {
foo(i);
}
"#,

View file

@ -175,8 +175,7 @@ fn add_func_to_accumulator(
edit.edit_file(file);
let target = function_builder.target.clone();
let function_template = function_builder.render();
let func = function_template.to_ast(ctx.config.snippet_cap, edit);
let func = function_builder.render(ctx.config.snippet_cap, edit);
if let Some(name) = adt_name {
let name = make::ty_path(make::ext::ident_path(&format!("{}", name.display(ctx.db()))));
@ -205,37 +204,6 @@ fn get_adt_source(
find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()]).map(|impl_| (impl_, range.file_id))
}
struct FunctionTemplate {
fn_def: ast::Fn,
ret_type: Option<ast::RetType>,
should_focus_return_type: bool,
tail_expr: ast::Expr,
}
impl FunctionTemplate {
fn to_ast(&self, cap: Option<SnippetCap>, edit: &mut SourceChangeBuilder) -> ast::Fn {
let Self { fn_def, ret_type, should_focus_return_type, tail_expr } = self;
if let Some(cap) = cap {
if *should_focus_return_type {
// Focus the return type if there is one
match ret_type {
Some(ret_type) => {
edit.add_placeholder_snippet(cap, ret_type.clone());
}
None => {
edit.add_placeholder_snippet(cap, tail_expr.clone());
}
}
} else {
edit.add_placeholder_snippet(cap, tail_expr.clone());
}
}
fn_def.clone()
}
}
struct FunctionBuilder {
target: GeneratedFunctionTarget,
fn_name: ast::Name,
@ -339,7 +307,7 @@ impl FunctionBuilder {
})
}
fn render(self) -> FunctionTemplate {
fn render(self, cap: Option<SnippetCap>, edit: &mut SourceChangeBuilder) -> ast::Fn {
let placeholder_expr = make::ext::expr_todo();
let fn_body = make::block_expr(vec![], Some(placeholder_expr));
let visibility = match self.visibility {
@ -361,17 +329,31 @@ impl FunctionBuilder {
)
.clone_for_update();
FunctionTemplate {
ret_type: fn_def.ret_type(),
// PANIC: we guarantee we always create a function body with a tail expr
tail_expr: fn_def
.body()
.expect("generated function should have a body")
.tail_expr()
.expect("function body should have a tail expression"),
should_focus_return_type: self.should_focus_return_type,
fn_def,
let ret_type = fn_def.ret_type();
// PANIC: we guarantee we always create a function body with a tail expr
let tail_expr = fn_def
.body()
.expect("generated function should have a body")
.tail_expr()
.expect("function body should have a tail expression");
if let Some(cap) = cap {
if self.should_focus_return_type {
// Focus the return type if there is one
match ret_type {
Some(ret_type) => {
edit.add_placeholder_snippet(cap, ret_type.clone());
}
None => {
edit.add_placeholder_snippet(cap, tail_expr.clone());
}
}
} else {
edit.add_placeholder_snippet(cap, tail_expr.clone());
}
}
fn_def
}
}

View file

@ -49,13 +49,13 @@ use crate::{
//
// fn bar() {
// {
// let word = "안녕하세요";
// let word: &str = "안녕하세요";
// if !word.is_empty() {
// print(word);
// }
// };
// {
// let word = "여러분";
// let word: &str = "여러분";
// if !word.is_empty() {
// print(word);
// }

View file

@ -59,10 +59,7 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
let ty = match ty.display_source_code(ctx.db(), module.into(), false) {
Ok(ty) => ty,
Err(_) => {
cov_mark::hit!(promote_local_not_applicable_if_ty_not_inferred);
return None;
}
Err(_) => return None,
};
let initializer = let_stmt.initializer()?;
@ -315,13 +312,17 @@ fn foo() {
#[test]
fn not_applicable_unknown_ty() {
cov_mark::check!(promote_local_not_applicable_if_ty_not_inferred);
check_assist_not_applicable(
check_assist(
promote_local_to_const,
r"
fn foo() {
let x$0 = bar();
}
",
r"
fn foo() {
const $0X: _ = bar();
}
",
);
}

View file

@ -145,7 +145,7 @@ fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<Sea
for scope in scopes {
let mut search_non_import = |_, r: FileReference| {
// The import itself is a use; we must skip that.
if r.category != Some(ReferenceCategory::Import) {
if !r.category.contains(ReferenceCategory::IMPORT) {
found = true;
true
} else {

View file

@ -3,7 +3,7 @@ use syntax::{
AstNode, AstToken,
};
use crate::{utils::test_related_attribute, AssistContext, AssistId, AssistKind, Assists};
use crate::{utils::test_related_attribute_syn, AssistContext, AssistId, AssistKind, Assists};
// Assist: toggle_ignore
//
@ -26,7 +26,7 @@ use crate::{utils::test_related_attribute, AssistContext, AssistId, AssistKind,
pub(crate) fn toggle_ignore(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let attr: ast::Attr = ctx.find_node_at_offset()?;
let func = attr.syntax().parent().and_then(ast::Fn::cast)?;
let attr = test_related_attribute(&func)?;
let attr = test_related_attribute_syn(&func)?;
match has_ignore_attribute(&func) {
None => acc.add(

View file

@ -0,0 +1,581 @@
use ide_db::source_change::SourceChangeBuilder;
use itertools::Itertools;
use syntax::{
algo,
ast::{self, make, AstNode},
ted::{self, Position},
NodeOrToken, SyntaxToken, TextRange, T,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
// Assist: wrap_unwrap_cfg_attr
//
// Wraps an attribute to a cfg_attr attribute or unwraps a cfg_attr attribute to the inner attributes.
//
// ```
// #[derive$0(Debug)]
// struct S {
// field: i32
// }
// ```
// ->
// ```
// #[cfg_attr($0, derive(Debug))]
// struct S {
// field: i32
// }
// ```
// What the assist is operating on.
enum WrapUnwrapOption {
    // A single derive inside a `#[derive(...)]` list: `derive` is the text range of
    // that derive path (plus an adjacent comma where applicable), `attr` the
    // enclosing `derive` attribute.
    WrapDerive { derive: TextRange, attr: ast::Attr },
    // A whole attribute, wrapped or unwrapped as one unit.
    WrapAttr(ast::Attr),
}
/// Attempts to isolate a single derive from a `#[derive(...)]` attribute list.
///
/// This collects all the tokens forming the "path" around `ident` within the derive
/// list. A derive token tree has no path nodes, so the tokens before and after the
/// ident must be gathered manually until a `,` / `(` / `)` delimiter is reached.
///
/// If this function would return `None` internally, the result degrades to
/// `WrapUnwrapOption::WrapAttr` for the whole attribute.
fn attempt_get_derive(attr: ast::Attr, ident: SyntaxToken) -> WrapUnwrapOption {
    let attempt_attr = || {
        {
            let mut derive = ident.text_range();
            // TokenTree is all the tokens between the `(` and `)`. They do not have paths,
            // so a path `serde::Serialize` would be [Ident Colon Colon Ident].
            // E.g. for `derive(Debug, serde::Serialize, Copy)` with `ident` on `Serialize`,
            // we grab all previous tokens until a `,` or `(` and all following tokens
            // until a `,` or `)`, consuming the following comma if it exists.
            let mut prev = algo::skip_trivia_token(
                ident.prev_sibling_or_token()?.into_token()?,
                syntax::Direction::Prev,
            )?;
            let mut following = algo::skip_trivia_token(
                ident.next_sibling_or_token()?.into_token()?,
                syntax::Direction::Next,
            )?;
            if (prev.kind() == T![,] || prev.kind() == T!['('])
                && (following.kind() == T![,] || following.kind() == T![')'])
            {
                // A single ident such as `Debug`: no path present. Extend the range
                // over one adjacent comma so removing the derive leaves a clean list.
                if following.kind() == T![,] {
                    derive = derive.cover(following.text_range());
                } else if following.kind() == T![')'] && prev.kind() == T![,] {
                    derive = derive.cover(prev.text_range());
                }

                Some(WrapUnwrapOption::WrapDerive { derive, attr: attr.clone() })
            } else {
                let mut consumed_comma = false;
                // Collect the path tokens preceding the ident, stopping at `,` or `(`.
                while let Some(prev_token) = algo::skip_trivia_token(prev, syntax::Direction::Prev)
                {
                    let kind = prev_token.kind();
                    if kind == T![,] {
                        consumed_comma = true;
                        derive = derive.cover(prev_token.text_range());
                        break;
                    } else if kind == T!['('] {
                        break;
                    } else {
                        derive = derive.cover(prev_token.text_range());
                    }
                    prev = prev_token.prev_sibling_or_token()?.into_token()?;
                }
                // Collect the path tokens following the ident, stopping at `,` or `)`;
                // take at most one comma overall (either leading or trailing).
                while let Some(next_token) =
                    algo::skip_trivia_token(following.clone(), syntax::Direction::Next)
                {
                    let kind = next_token.kind();
                    match kind {
                        T![,] if !consumed_comma => {
                            derive = derive.cover(next_token.text_range());
                            break;
                        }
                        T![')'] | T![,] => break,
                        _ => derive = derive.cover(next_token.text_range()),
                    }
                    following = next_token.next_sibling_or_token()?.into_token()?;
                }
                Some(WrapUnwrapOption::WrapDerive { derive, attr: attr.clone() })
            }
        }
    };
    // Only attempt the derive-path extraction when the ident actually sits inside a
    // token tree of a `derive` attribute; otherwise wrap the whole attribute.
    if ident.parent().and_then(ast::TokenTree::cast).is_none()
        || !attr.simple_name().map(|v| v.eq("derive")).unwrap_or_default()
    {
        WrapUnwrapOption::WrapAttr(attr)
    } else {
        attempt_attr().unwrap_or(WrapUnwrapOption::WrapAttr(attr))
    }
}
pub(crate) fn wrap_unwrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    // Entry point of the assist: decide whether the cursor/selection refers to
    // a whole attribute or to a single derive path inside `#[derive(...)]`,
    // then dispatch to the matching wrap/unwrap helper.
    let option = if ctx.has_empty_selection() {
        let ident = ctx.find_token_syntax_at_offset(T![ident]);
        let attr = ctx.find_node_at_offset::<ast::Attr>();
        match (attr, ident) {
            // Cursor on an ident inside a `derive` attribute: try to wrap only
            // that one derive. (`attr` is moved, not cloned — it is not used
            // again in this arm.)
            (Some(attr), Some(ident)) if attr.simple_name().as_deref() == Some("derive") => {
                Some(attempt_get_derive(attr, ident))
            }
            (Some(attr), _) => Some(WrapUnwrapOption::WrapAttr(attr)),
            _ => None,
        }
    } else {
        let covering_element = ctx.covering_element();
        match covering_element {
            NodeOrToken::Node(node) => ast::Attr::cast(node).map(WrapUnwrapOption::WrapAttr),
            NodeOrToken::Token(ident) if ident.kind() == syntax::T![ident] => {
                let attr = ident.parent_ancestors().find_map(ast::Attr::cast)?;
                Some(attempt_get_derive(attr, ident))
            }
            _ => None,
        }
    }?;
    match option {
        // An existing `cfg_attr(...)`: offer to extract its inner attributes.
        WrapUnwrapOption::WrapAttr(attr) if attr.simple_name().as_deref() == Some("cfg_attr") => {
            unwrap_cfg_attr(acc, attr)
        }
        WrapUnwrapOption::WrapAttr(attr) => wrap_cfg_attr(acc, ctx, attr),
        WrapUnwrapOption::WrapDerive { derive, attr } => wrap_derive(acc, ctx, attr, derive),
    }
}
/// Registers the assist that extracts the derive covered by `derive_element`
/// out of `attr` (a `#[derive(...)]` list) into a new
/// `#[cfg_attr($0, derive(...))]` attribute inserted on the line below the
/// remaining derive.
fn wrap_derive(
    acc: &mut Assists,
    ctx: &AssistContext<'_>,
    attr: ast::Attr,
    derive_element: TextRange,
) -> Option<()> {
    let range = attr.syntax().text_range();
    let token_tree = attr.token_tree()?;
    // Textual form of the extracted derive path, used in the assist label.
    let mut path_text = String::new();
    // Tokens of the extracted derive, placed into the new `cfg_attr`.
    let mut cfg_derive_tokens = Vec::new();
    // Tokens that remain in the original `#[derive(...)]`.
    let mut new_derive = Vec::new();
    for tt in token_tree.token_trees_and_tokens() {
        let NodeOrToken::Token(token) = tt else {
            continue;
        };
        // The surrounding parentheses are re-created by `make::token_tree` below.
        if token.kind() == T!['('] || token.kind() == T![')'] {
            continue;
        }
        if derive_element.contains_range(token.text_range()) {
            // Drop separators and whitespace from the extracted path.
            if token.kind() != T![,] && token.kind() != syntax::SyntaxKind::WHITESPACE {
                path_text.push_str(token.text());
                cfg_derive_tokens.push(NodeOrToken::Token(token));
            }
        } else {
            new_derive.push(NodeOrToken::Token(token));
        }
    }
    let handle_source_change = |edit: &mut SourceChangeBuilder| {
        let new_derive = make::attr_outer(make::meta_token_tree(
            make::ext::ident_path("derive"),
            make::token_tree(T!['('], new_derive),
        ))
        .clone_for_update();
        // Build `cfg_attr(, derive(<extracted>))`; the condition before the
        // comma is left empty for the user to fill in via the snippet tabstop.
        let meta = make::meta_token_tree(
            make::ext::ident_path("cfg_attr"),
            make::token_tree(
                T!['('],
                vec![
                    NodeOrToken::Token(make::token(T![,])),
                    NodeOrToken::Token(make::tokens::whitespace(" ")),
                    NodeOrToken::Token(make::tokens::ident("derive")),
                    NodeOrToken::Node(make::token_tree(T!['('], cfg_derive_tokens)),
                ],
            ),
        );
        // Remove the derive attribute
        let edit_attr = edit.make_syntax_mut(attr.syntax().clone());
        ted::replace(edit_attr, new_derive.syntax().clone());
        let cfg_attr = make::attr_outer(meta).clone_for_update();
        // Insert the new `cfg_attr` attribute on the next line.
        ted::insert_all_raw(
            Position::after(new_derive.syntax().clone()),
            vec![make::tokens::whitespace("\n").into(), cfg_attr.syntax().clone().into()],
        );
        // Place a tabstop right after `cfg_attr(` so the user can type the condition.
        if let Some(snippet_cap) = ctx.config.snippet_cap {
            if let Some(first_meta) =
                cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
            {
                edit.add_tabstop_after_token(snippet_cap, first_meta)
            }
        }
    };
    acc.add(
        AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor),
        format!("Wrap #[derive({path_text})] in `cfg_attr`",),
        range,
        handle_source_change,
    );
    Some(())
}
/// Registers the assist that wraps a plain attribute into
/// `#[cfg_attr($0, <original attribute contents>)]`, leaving a snippet
/// tabstop where the cfg condition goes.
fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) -> Option<()> {
    let range = attr.syntax().text_range();
    let path = attr.path()?;
    let handle_source_change = |edit: &mut SourceChangeBuilder| {
        // The token list starts with the (empty) condition slot: `,` + space,
        // followed by the original attribute's path tokens.
        let mut cfg_attr_tokens = vec![
            NodeOrToken::Token(make::token(T![,])),
            NodeOrToken::Token(make::tokens::whitespace(" ")),
        ];
        cfg_attr_tokens.extend(
            path.syntax()
                .descendants_with_tokens()
                .filter_map(|elem| elem.into_token().map(NodeOrToken::Token)),
        );
        if let Some(meta) = attr.meta() {
            match (meta.eq_token(), meta.expr()) {
                // `name = value` form: re-emit ` = ` and the value's tokens.
                (Some(eq), Some(expr)) => {
                    cfg_attr_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
                    cfg_attr_tokens.push(NodeOrToken::Token(eq));
                    cfg_attr_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
                    cfg_attr_tokens.extend(
                        expr.syntax()
                            .descendants_with_tokens()
                            .filter_map(|elem| elem.into_token().map(NodeOrToken::Token)),
                    );
                }
                // `name(...)` form: copy the token tree verbatim.
                _ => {
                    if let Some(tt) = meta.token_tree() {
                        cfg_attr_tokens.extend(tt.token_trees_and_tokens());
                    }
                }
            }
        }
        let meta = make::meta_token_tree(
            make::ext::ident_path("cfg_attr"),
            make::token_tree(T!['('], cfg_attr_tokens),
        );
        // Preserve inner (`#![...]`) vs outer (`#[...]`) attribute form.
        let cfg_attr = match attr.excl_token() {
            Some(_) => make::attr_inner(meta),
            None => make::attr_outer(meta),
        }
        .clone_for_update();
        let attr_syntax = edit.make_syntax_mut(attr.syntax().clone());
        ted::replace(attr_syntax, cfg_attr.syntax());
        // Put a tabstop right after `cfg_attr(` for the condition.
        if let Some(cap) = ctx.config.snippet_cap {
            let l_paren = cfg_attr
                .meta()
                .and_then(|meta| meta.token_tree())
                .and_then(|tt| tt.l_paren_token());
            if let Some(l_paren) = l_paren {
                edit.add_tabstop_after_token(cap, l_paren);
            }
        }
    };
    acc.add(
        AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor),
        "Convert to `cfg_attr`",
        range,
        handle_source_change,
    );
    Some(())
}
/// Registers the assist that replaces a `#[cfg_attr(<cond>, <attrs>...)]`
/// attribute with the attribute(s) it wraps, dropping the condition.
///
/// Returns `None` (no assist) if no inner attribute can be reconstructed.
fn unwrap_cfg_attr(acc: &mut Assists, attr: ast::Attr) -> Option<()> {
    let range = attr.syntax().text_range();
    let meta = attr.meta()?;
    let meta_tt = meta.token_tree()?;
    let mut inner_attrs = Vec::with_capacity(1);
    // Everything before the first top-level `,` is the cfg condition; it is
    // skipped until the comma is seen.
    let mut found_comma = false;
    // `skip(1)` skips the opening `(` of the `cfg_attr(...)` token tree.
    let mut iter = meta_tt.token_trees_and_tokens().skip(1).peekable();
    while let Some(tt) = iter.next() {
        if let NodeOrToken::Token(token) = &tt {
            if token.kind() == T![')'] {
                break;
            }
            if token.kind() == T![,] {
                found_comma = true;
                continue;
            }
        }
        if !found_comma {
            continue;
        }
        // Each wrapped attribute starts with an ident naming its path.
        let Some(attr_name) = tt.into_token().and_then(|token| {
            if token.kind() == T![ident] {
                Some(make::ext::ident_path(token.text()))
            } else {
                None
            }
        }) else {
            continue;
        };
        let next_tt = iter.next()?;
        // The shape of the reconstructed attribute depends on what follows its name.
        let meta = match next_tt {
            // `name(...)` — token-tree style attribute.
            NodeOrToken::Node(tt) => make::meta_token_tree(attr_name, tt),
            // `name` directly followed by `,` or `)` — bare path attribute.
            NodeOrToken::Token(token) if token.kind() == T![,] || token.kind() == T![')'] => {
                make::meta_path(attr_name)
            }
            // Otherwise expect `name = <literal>`.
            NodeOrToken::Token(token) => {
                let equals = algo::skip_trivia_token(token, syntax::Direction::Next)?;
                if equals.kind() != T![=] {
                    return None;
                }
                let expr_token =
                    algo::skip_trivia_token(equals.next_token()?, syntax::Direction::Next)
                        .and_then(|it| {
                            if it.kind().is_literal() {
                                Some(make::expr_literal(it.text()))
                            } else {
                                None
                            }
                        })?;
                make::meta_expr(attr_name, ast::Expr::Literal(expr_token))
            }
        };
        // Preserve inner (`#![...]`) vs outer (`#[...]`) form of the original.
        if attr.excl_token().is_some() {
            inner_attrs.push(make::attr_inner(meta));
        } else {
            inner_attrs.push(make::attr_outer(meta));
        }
    }
    if inner_attrs.is_empty() {
        return None;
    }
    let handle_source_change = |f: &mut SourceChangeBuilder| {
        // Multiple wrapped attributes become one attribute per line.
        let inner_attrs = inner_attrs.iter().map(|it| it.to_string()).join("\n");
        f.replace(range, inner_attrs);
    };
    acc.add(
        AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor),
        "Extract Inner Attributes from `cfg_attr`",
        range,
        handle_source_change,
    );
    Some(())
}
#[cfg(test)]
mod tests {
    use crate::tests::check_assist;
    use super::*;
    // Round-trip: wrap a plain `#[derive(...)]` and unwrap a `cfg_attr` back.
    #[test]
    fn test_basic_to_from_cfg_attr() {
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
#[derive$0(Debug)]
pub struct Test {
    test: u32,
}
"#,
            r#"
#[cfg_attr($0, derive(Debug))]
pub struct Test {
    test: u32,
}
"#,
        );
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
#[cfg_attr(debug_assertions, $0 derive(Debug))]
pub struct Test {
    test: u32,
}
"#,
            r#"
#[derive(Debug)]
pub struct Test {
    test: u32,
}
"#,
        );
    }
    // Bare path attributes (`#[foo]`) wrap and unwrap cleanly.
    #[test]
    fn to_from_path_attr() {
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
pub struct Test {
    #[foo$0]
    test: u32,
}
"#,
            r#"
pub struct Test {
    #[cfg_attr($0, foo)]
    test: u32,
}
"#,
        );
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
pub struct Test {
    #[cfg_attr(debug_assertions$0, foo)]
    test: u32,
}
"#,
            r#"
pub struct Test {
    #[foo]
    test: u32,
}
"#,
        );
    }
    // `#[name = "value"]` attributes keep their `= literal` form.
    #[test]
    fn to_from_eq_attr() {
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
pub struct Test {
    #[foo = "bar"$0]
    test: u32,
}
"#,
            r#"
pub struct Test {
    #[cfg_attr($0, foo = "bar")]
    test: u32,
}
"#,
        );
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
pub struct Test {
    #[cfg_attr(debug_assertions$0, foo = "bar")]
    test: u32,
}
"#,
            r#"
pub struct Test {
    #[foo = "bar"]
    test: u32,
}
"#,
        );
    }
    // Inner attributes (`#![...]`) stay inner after wrapping/unwrapping.
    #[test]
    fn inner_attrs() {
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
#![no_std$0]
"#,
            r#"
#![cfg_attr($0, no_std)]
"#,
        );
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
#![cfg_attr(not(feature = "std")$0, no_std)]
"#,
            r#"
#![no_std]
"#,
        );
    }
    // Extracting a single derive out of a multi-derive list.
    #[test]
    fn test_derive_wrap() {
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
#[derive(Debug$0, Clone, Copy)]
pub struct Test {
    test: u32,
}
"#,
            // NOTE: the space after `derive(` is the current output when the
            // first derive in the list is extracted.
            r#"
#[derive( Clone, Copy)]
#[cfg_attr($0, derive(Debug))]
pub struct Test {
    test: u32,
}
"#,
        );
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
#[derive(Clone, Debug$0, Copy)]
pub struct Test {
    test: u32,
}
"#,
            r#"
#[derive(Clone, Copy)]
#[cfg_attr($0, derive(Debug))]
pub struct Test {
    test: u32,
}
"#,
        );
    }
    // Multi-segment derive paths (`std::fmt::Debug`) are extracted whole.
    #[test]
    fn test_derive_wrap_with_path() {
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
#[derive(std::fmt::Debug$0, Clone, Copy)]
pub struct Test {
    test: u32,
}
"#,
            r#"
#[derive( Clone, Copy)]
#[cfg_attr($0, derive(std::fmt::Debug))]
pub struct Test {
    test: u32,
}
"#,
        );
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
#[derive(Clone, std::fmt::Debug$0, Copy)]
pub struct Test {
    test: u32,
}
"#,
            r#"
#[derive(Clone, Copy)]
#[cfg_attr($0, derive(std::fmt::Debug))]
pub struct Test {
    test: u32,
}
"#,
        );
    }
    // Extracting the last derive consumes the comma before it, and the cursor
    // may sit anywhere inside the ident.
    #[test]
    fn test_derive_wrap_at_end() {
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
#[derive(std::fmt::Debug, Clone, Cop$0y)]
pub struct Test {
    test: u32,
}
"#,
            r#"
#[derive(std::fmt::Debug, Clone)]
#[cfg_attr($0, derive(Copy))]
pub struct Test {
    test: u32,
}
"#,
        );
        check_assist(
            wrap_unwrap_cfg_attr,
            r#"
#[derive(Clone, Copy, std::fmt::D$0ebug)]
pub struct Test {
    test: u32,
}
"#,
            r#"
#[derive(Clone, Copy)]
#[cfg_attr($0, derive(std::fmt::Debug))]
pub struct Test {
    test: u32,
}
"#,
        );
    }
}

View file

@ -116,6 +116,7 @@ mod handlers {
mod change_visibility;
mod convert_bool_then;
mod convert_comment_block;
mod convert_from_to_tryfrom;
mod convert_integer_literal;
mod convert_into_to_from;
mod convert_iter_for_each_to_for;
@ -217,6 +218,7 @@ mod handlers {
mod unwrap_result_return_type;
mod unwrap_tuple;
mod wrap_return_type_in_result;
mod wrap_unwrap_cfg_attr;
pub(crate) fn all() -> &'static [Handler] {
&[
@ -237,6 +239,7 @@ mod handlers {
convert_bool_then::convert_bool_then_to_if,
convert_bool_then::convert_if_to_bool_then,
convert_comment_block::convert_comment_block,
convert_from_to_tryfrom::convert_from_to_tryfrom,
convert_integer_literal::convert_integer_literal,
convert_into_to_from::convert_into_to_from,
convert_iter_for_each_to_for::convert_iter_for_each_to_for,
@ -342,6 +345,8 @@ mod handlers {
unwrap_tuple::unwrap_tuple,
unqualify_method_call::unqualify_method_call,
wrap_return_type_in_result::wrap_return_type_in_result,
wrap_unwrap_cfg_attr::wrap_unwrap_cfg_attr,
// These are manually sorted for better priorities. By default,
// priority is determined by the size of the target range (smaller
// target wins). If the ranges are equal, position in this list is

View file

@ -390,6 +390,36 @@ fn main() {
)
}
#[test]
fn doctest_convert_from_to_tryfrom() {
check_doc_test(
"convert_from_to_tryfrom",
r#####"
//- minicore: from
impl $0From<usize> for Thing {
fn from(val: usize) -> Self {
Thing {
b: val.to_string(),
a: val
}
}
}
"#####,
r#####"
impl TryFrom<usize> for Thing {
type Error = ${0:()};
fn try_from(val: usize) -> Result<Self, Self::Error> {
Ok(Thing {
b: val.to_string(),
a: val
})
}
}
"#####,
)
}
#[test]
fn doctest_convert_if_to_bool_then() {
check_doc_test(
@ -1820,13 +1850,13 @@ fn print(_: &str) {}
fn bar() {
{
let word = "안녕하세요";
let word: &str = "안녕하세요";
if !word.is_empty() {
print(word);
}
};
{
let word = "여러분";
let word: &str = "여러분";
if !word.is_empty() {
print(word);
}
@ -3151,3 +3181,22 @@ fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
"#####,
)
}
#[test]
fn doctest_wrap_unwrap_cfg_attr() {
check_doc_test(
"wrap_unwrap_cfg_attr",
r#####"
#[derive$0(Debug)]
struct S {
field: i32
}
"#####,
r#####"
#[cfg_attr($0, derive(Debug))]
struct S {
field: i32
}
"#####,
)
}

View file

@ -71,7 +71,7 @@ pub fn extract_trivial_expression(block_expr: &ast::BlockExpr) -> Option<ast::Ex
///
/// It may produce false positives, for example, `#[wasm_bindgen_test]` requires a different command to run the test,
/// but it's better than not to have the runnables for the tests at all.
pub fn test_related_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
pub fn test_related_attribute_syn(fn_def: &ast::Fn) -> Option<ast::Attr> {
fn_def.attrs().find_map(|attr| {
let path = attr.path()?;
let text = path.syntax().text().to_string();
@ -83,6 +83,19 @@ pub fn test_related_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
})
}
pub fn has_test_related_attribute(attrs: &hir::AttrsWithOwner) -> bool {
attrs.iter().any(|attr| {
let path = attr.path();
(|| {
Some(
path.segments().first()?.as_text()?.starts_with("test")
|| path.segments().last()?.as_text()?.ends_with("test"),
)
})()
.unwrap_or_default()
})
}
#[derive(Clone, Copy, PartialEq)]
pub enum IgnoreAssocItems {
DocHiddenAttrPresent,

View file

@ -9,6 +9,7 @@ use ide_db::{
ty_filter::TryEnum,
SnippetCap,
};
use stdx::never;
use syntax::{
ast::{self, make, AstNode, AstToken},
SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR},
@ -319,7 +320,9 @@ fn build_postfix_snippet_builder<'ctx>(
) -> Option<impl Fn(&str, &str, &str) -> Builder + 'ctx> {
let receiver_range = ctx.sema.original_range_opt(receiver.syntax())?.range;
if ctx.source_range().end() < receiver_range.start() {
// This shouldn't happen, yet it does. I assume this might be due to an incorrect token mapping.
// This shouldn't happen, yet it does. I assume this might be due to an incorrect token
// mapping.
never!();
return None;
}
let delete_range = TextRange::new(receiver_range.start(), ctx.source_range().end());

View file

@ -226,7 +226,7 @@ pub(crate) fn complete_ascribed_type(
if !path_ctx.is_trivial_path() {
return None;
}
let x = match ascription {
let ty = match ascription {
TypeAscriptionTarget::Let(pat) | TypeAscriptionTarget::FnParam(pat) => {
ctx.sema.type_of_pat(pat.as_ref()?)
}
@ -235,7 +235,9 @@ pub(crate) fn complete_ascribed_type(
}
}?
.adjusted();
let ty_string = x.display_source_code(ctx.db, ctx.module.into(), true).ok()?;
acc.add(render_type_inference(ty_string, ctx));
if !ty.is_unknown() {
let ty_string = ty.display_source_code(ctx.db, ctx.module.into(), true).ok()?;
acc.add(render_type_inference(ty_string, ctx));
}
None
}

View file

@ -17,7 +17,7 @@ use ide_db::{
};
use syntax::{
ast::{self, AttrKind, NameOrNameRef},
AstNode, SmolStr,
AstNode, Edition, SmolStr,
SyntaxKind::{self, *},
SyntaxToken, TextRange, TextSize, T,
};
@ -667,7 +667,8 @@ impl<'a> CompletionContext<'a> {
let file_with_fake_ident = {
let parse = db.parse(file_id);
let edit = Indel::insert(offset, COMPLETION_MARKER.to_owned());
parse.reparse(&edit).tree()
// FIXME: Edition
parse.reparse(&edit, Edition::CURRENT).tree()
};
// always pick the token to the immediate left of the cursor, as that is what we are actually

View file

@ -200,7 +200,7 @@ fn validate_snippet(
) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> {
let mut imports = Vec::with_capacity(requires.len());
for path in requires.iter() {
let use_path = ast::SourceFile::parse(&format!("use {path};"))
let use_path = ast::SourceFile::parse(&format!("use {path};"), syntax::Edition::CURRENT)
.syntax_node()
.descendants()
.find_map(ast::Path::cast)?;

View file

@ -19,7 +19,7 @@ struct Foo<'lt, T, const C: usize> where $0 {}
en Enum Enum
ma makro!() macro_rules! makro
md module
st Foo<> Foo<'static, {unknown}, _>
st Foo<> Foo<'{error}, {unknown}, _>
st Record Record
st Tuple Tuple
st Unit Unit
@ -92,7 +92,7 @@ struct Foo<'lt, T, const C: usize> where for<'a> $0 {}
en Enum Enum
ma makro!() macro_rules! makro
md module
st Foo<> Foo<'static, {unknown}, _>
st Foo<> Foo<'{error}, {unknown}, _>
st Record Record
st Tuple Tuple
st Unit Unit

View file

@ -20,8 +20,8 @@ struct Foo<'lt, T, const C: usize> {
en Enum Enum
ma makro!() macro_rules! makro
md module
sp Self Foo<'static, {unknown}, _>
st Foo<> Foo<'static, {unknown}, _>
sp Self Foo<'{error}, {unknown}, _>
st Foo<> Foo<'{error}, {unknown}, _>
st Record Record
st Tuple Tuple
st Unit Unit
@ -45,8 +45,8 @@ struct Foo<'lt, T, const C: usize>(f$0);
en Enum Enum
ma makro!() macro_rules! makro
md module
sp Self Foo<'static, {unknown}, _>
st Foo<> Foo<'static, {unknown}, _>
sp Self Foo<'{error}, {unknown}, _>
st Foo<> Foo<'{error}, {unknown}, _>
st Record Record
st Tuple Tuple
st Unit Unit

View file

@ -26,6 +26,7 @@ indexmap.workspace = true
memchr = "2.6.4"
triomphe.workspace = true
nohash-hasher.workspace = true
bitflags.workspace = true
# local deps
base-db.workspace = true

View file

@ -91,82 +91,101 @@ impl RootDatabase {
crate::symbol_index::LocalRootsQuery
crate::symbol_index::LibraryRootsQuery
// HirDatabase
hir::db::MirBodyQuery
hir::db::BorrowckQuery
hir::db::TyQuery
hir::db::ValueTyQuery
hir::db::ImplSelfTyQuery
hir::db::ConstParamTyQuery
hir::db::ConstEvalQuery
hir::db::ConstEvalDiscriminantQuery
hir::db::ImplTraitQuery
hir::db::FieldTypesQuery
hir::db::LayoutOfAdtQuery
hir::db::TargetDataLayoutQuery
hir::db::CallableItemSignatureQuery
hir::db::ReturnTypeImplTraitsQuery
hir::db::GenericPredicatesForParamQuery
hir::db::GenericPredicatesQuery
hir::db::TraitEnvironmentQuery
hir::db::GenericDefaultsQuery
hir::db::InherentImplsInCrateQuery
hir::db::InherentImplsInBlockQuery
hir::db::IncoherentInherentImplCratesQuery
hir::db::TraitImplsInCrateQuery
hir::db::TraitImplsInBlockQuery
hir::db::TraitImplsInDepsQuery
hir::db::InternCallableDefQuery
hir::db::InternLifetimeParamIdQuery
hir::db::InternImplTraitIdQuery
hir::db::InternTypeOrConstParamIdQuery
hir::db::InternClosureQuery
hir::db::InternCoroutineQuery
hir::db::AssociatedTyDataQuery
hir::db::TraitDatumQuery
hir::db::AdtDatumQuery
hir::db::ImplDatumQuery
hir::db::AdtVarianceQuery
hir::db::AssociatedTyDataQuery
hir::db::AssociatedTyValueQuery
hir::db::BorrowckQuery
hir::db::CallableItemSignatureQuery
hir::db::ConstEvalDiscriminantQuery
hir::db::ConstEvalQuery
hir::db::ConstEvalStaticQuery
hir::db::ConstParamTyQuery
hir::db::FieldTypesQuery
hir::db::FnDefDatumQuery
hir::db::FnDefVarianceQuery
hir::db::AdtVarianceQuery
hir::db::AssociatedTyValueQuery
hir::db::GenericDefaultsQuery
hir::db::GenericPredicatesForParamQuery
hir::db::GenericPredicatesQuery
hir::db::ImplDatumQuery
hir::db::ImplSelfTyQuery
hir::db::ImplTraitQuery
hir::db::IncoherentInherentImplCratesQuery
hir::db::InherentImplsInBlockQuery
hir::db::InherentImplsInCrateQuery
hir::db::InternCallableDefQuery
hir::db::InternClosureQuery
hir::db::InternCoroutineQuery
hir::db::InternImplTraitIdQuery
hir::db::InternLifetimeParamIdQuery
hir::db::InternTypeOrConstParamIdQuery
hir::db::LayoutOfAdtQuery
hir::db::MirBodyQuery
hir::db::ProgramClausesForChalkEnvQuery
hir::db::ReturnTypeImplTraitsQuery
hir::db::TargetDataLayoutQuery
hir::db::TraitDatumQuery
hir::db::TraitEnvironmentQuery
hir::db::TraitImplsInBlockQuery
hir::db::TraitImplsInCrateQuery
hir::db::TraitImplsInDepsQuery
hir::db::TyQuery
hir::db::ValueTyQuery
// DefDatabase
hir::db::FileItemTreeQuery
hir::db::AttrsQuery
hir::db::BlockDefMapQuery
hir::db::StructDataWithDiagnosticsQuery
hir::db::UnionDataWithDiagnosticsQuery
hir::db::BlockItemTreeQuery
hir::db::BodyQuery
hir::db::BodyWithSourceMapQuery
hir::db::ConstDataQuery
hir::db::ConstVisibilityQuery
hir::db::CrateDefMapQuery
hir::db::CrateLangItemsQuery
hir::db::CrateNotableTraitsQuery
hir::db::CrateSupportsNoStdQuery
hir::db::EnumDataQuery
hir::db::EnumVariantDataWithDiagnosticsQuery
hir::db::ImplDataWithDiagnosticsQuery
hir::db::TraitDataWithDiagnosticsQuery
hir::db::TraitAliasDataQuery
hir::db::TypeAliasDataQuery
hir::db::ExprScopesQuery
hir::db::ExternCrateDeclDataQuery
hir::db::FieldVisibilitiesQuery
hir::db::FieldsAttrsQuery
hir::db::FieldsAttrsSourceMapQuery
hir::db::FileItemTreeQuery
hir::db::FunctionDataQuery
hir::db::ConstDataQuery
hir::db::StaticDataQuery
hir::db::FunctionVisibilityQuery
hir::db::GenericParamsQuery
hir::db::ImplDataWithDiagnosticsQuery
hir::db::ImportMapQuery
hir::db::InternAnonymousConstQuery
hir::db::InternBlockQuery
hir::db::InternConstQuery
hir::db::InternEnumQuery
hir::db::InternExternBlockQuery
hir::db::InternExternCrateQuery
hir::db::InternFunctionQuery
hir::db::InternImplQuery
hir::db::InternInTypeConstQuery
hir::db::InternMacro2Query
hir::db::InternMacroRulesQuery
hir::db::InternProcMacroQuery
hir::db::InternStaticQuery
hir::db::InternStructQuery
hir::db::InternTraitAliasQuery
hir::db::InternTraitQuery
hir::db::InternTypeAliasQuery
hir::db::InternUnionQuery
hir::db::InternUseQuery
hir::db::LangItemQuery
hir::db::Macro2DataQuery
hir::db::MacroRulesDataQuery
hir::db::ProcMacroDataQuery
hir::db::BodyWithSourceMapQuery
hir::db::BodyQuery
hir::db::ExprScopesQuery
hir::db::GenericParamsQuery
hir::db::FieldsAttrsQuery
hir::db::FieldsAttrsSourceMapQuery
hir::db::AttrsQuery
hir::db::CrateLangItemsQuery
hir::db::LangItemQuery
hir::db::ImportMapQuery
hir::db::FieldVisibilitiesQuery
hir::db::FunctionVisibilityQuery
hir::db::ConstVisibilityQuery
hir::db::CrateSupportsNoStdQuery
hir::db::ExternCrateDeclDataQuery
hir::db::InternAnonymousConstQuery
hir::db::InternExternCrateQuery
hir::db::InternInTypeConstQuery
hir::db::InternUseQuery
hir::db::StaticDataQuery
hir::db::StructDataWithDiagnosticsQuery
hir::db::TraitAliasDataQuery
hir::db::TraitDataWithDiagnosticsQuery
hir::db::TypeAliasDataQuery
hir::db::UnionDataWithDiagnosticsQuery
// InternDatabase
hir::db::InternFunctionQuery
@ -192,9 +211,10 @@ impl RootDatabase {
hir::db::InternMacroCallQuery
hir::db::InternSyntaxContextQuery
hir::db::MacroArgQuery
hir::db::ParseMacroExpansionErrorQuery
hir::db::ParseMacroExpansionQuery
hir::db::RealSpanMapQuery
hir::db::ProcMacrosQuery
hir::db::RealSpanMapQuery
// LineIndexDatabase
crate::LineIndexQuery

View file

@ -91,8 +91,10 @@ pub fn docs_with_rangemap(
db: &dyn DefDatabase,
attrs: &AttrsWithOwner,
) -> Option<(Documentation, DocsRangeMap)> {
let docs =
attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value().map(|s| (s, attr.id)));
let docs = attrs
.by_key("doc")
.attrs()
.filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id)));
let indent = doc_indent(attrs);
let mut buf = String::new();
let mut mapping = Vec::new();
@ -132,7 +134,7 @@ pub fn docs_with_rangemap(
}
pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
let docs = attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value());
let docs = attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value_unescape());
let indent = doc_indent(attrs);
let mut buf = String::new();
for doc in docs {
@ -270,10 +272,9 @@ fn doc_indent(attrs: &hir::Attrs) -> usize {
attrs
.by_key("doc")
.attrs()
.filter_map(|attr| attr.string_value())
.filter_map(|attr| attr.string_value()) // no need to use unescape version here
.flat_map(|s| s.lines())
.filter(|line| !line.chars().all(|c| c.is_whitespace()))
.map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
.filter_map(|line| line.chars().position(|c| !c.is_whitespace()))
.min()
.unwrap_or(0)
}

Some files were not shown because too many files have changed in this diff Show more