Merge commit '258b15c506a2d3ad862fd17ae24eaf272443f477' into sync-from-ra

This commit is contained in:
Laurențiu Nicola 2023-09-18 12:32:37 +03:00
commit cb4c5414ea
195 changed files with 5773 additions and 2750 deletions

View file

@ -86,12 +86,20 @@ jobs:
- name: Test - name: Test
run: cargo test ${{ env.USE_SYSROOT_ABI }} -- --nocapture --quiet run: cargo test ${{ env.USE_SYSROOT_ABI }} -- --nocapture --quiet
- name: Switch to stable toolchain
run: |
rustup update --no-self-update stable
rustup component add --toolchain stable rust-src
rustup default stable
- name: Run analysis-stats on rust-analyzer - name: Run analysis-stats on rust-analyzer
if: matrix.os == 'ubuntu-latest' if: matrix.os == 'ubuntu-latest'
run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats . run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats .
- name: Run analysis-stats on rust std library - name: Run analysis-stats on rust std library
if: matrix.os == 'ubuntu-latest' if: matrix.os == 'ubuntu-latest'
env:
RUSTC_BOOTSTRAP: 1
run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std
# Weird targets to catch non-portable code # Weird targets to catch non-portable code

View file

@ -67,7 +67,7 @@ jobs:
other_metrics: other_metrics:
strategy: strategy:
matrix: matrix:
names: [self, ripgrep, webrender, diesel] names: [self, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18]
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: [setup_cargo, build_metrics] needs: [setup_cargo, build_metrics]
@ -92,7 +92,7 @@ jobs:
key: ${{ runner.os }}-target-${{ github.sha }} key: ${{ runner.os }}-target-${{ github.sha }}
- name: Collect metrics - name: Collect metrics
run: cargo xtask metrics ${{ matrix.names }} run: cargo xtask metrics "${{ matrix.names }}"
- name: Upload metrics - name: Upload metrics
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v3
@ -118,25 +118,30 @@ jobs:
with: with:
name: self-${{ github.sha }} name: self-${{ github.sha }}
- name: Download ripgrep metrics - name: Download ripgrep-13.0.0 metrics
uses: actions/download-artifact@v3 uses: actions/download-artifact@v3
with: with:
name: ripgrep-${{ github.sha }} name: ripgrep-13.0.0-${{ github.sha }}
- name: Download webrender metrics - name: Download webrender-2022 metrics
uses: actions/download-artifact@v3 uses: actions/download-artifact@v3
with: with:
name: webrender-${{ github.sha }} name: webrender-2022-${{ github.sha }}
- name: Download diesel metrics - name: Download diesel-1.4.8 metrics
uses: actions/download-artifact@v3 uses: actions/download-artifact@v3
with: with:
name: diesel-${{ github.sha }} name: diesel-1.4.8-${{ github.sha }}
- name: Download hyper-0.14.18 metrics
uses: actions/download-artifact@v3
with:
name: hyper-0.14.18-${{ github.sha }}
- name: Combine json - name: Combine json
run: | run: |
git clone --depth 1 https://$METRICS_TOKEN@github.com/rust-analyzer/metrics.git git clone --depth 1 https://$METRICS_TOKEN@github.com/rust-analyzer/metrics.git
jq -s ".[0] * .[1] * .[2] * .[3] * .[4]" build.json self.json ripgrep.json webrender.json diesel.json -c >> metrics/metrics.json jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json
cd metrics cd metrics
git add . git add .
git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈 git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈

View file

@ -128,6 +128,8 @@ jobs:
- name: Run analysis-stats on rust std library - name: Run analysis-stats on rust std library
if: matrix.target == 'x86_64-unknown-linux-gnu' if: matrix.target == 'x86_64-unknown-linux-gnu'
env:
RUSTC_BOOTSTRAP: 1
run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std
- name: Upload artifacts - name: Upload artifacts

116
Cargo.lock generated
View file

@ -381,14 +381,14 @@ dependencies = [
[[package]] [[package]]
name = "filetime" name = "filetime"
version = "0.2.19" version = "0.2.22"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e884668cd0c7480504233e951174ddc3b382f7c2666e3b7310b5c4e7b0c37f9" checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"redox_syscall", "redox_syscall 0.3.5",
"windows-sys 0.42.0", "windows-sys 0.48.0",
] ]
[[package]] [[package]]
@ -541,6 +541,7 @@ dependencies = [
"mbe", "mbe",
"once_cell", "once_cell",
"profile", "profile",
"ra-ap-rustc_parse_format",
"rustc-hash", "rustc-hash",
"smallvec", "smallvec",
"stdx", "stdx",
@ -854,7 +855,6 @@ version = "0.0.0"
dependencies = [ dependencies = [
"dashmap", "dashmap",
"hashbrown 0.12.3", "hashbrown 0.12.3",
"once_cell",
"rustc-hash", "rustc-hash",
"triomphe", "triomphe",
] ]
@ -999,7 +999,7 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]] [[package]]
name = "lsp-server" name = "lsp-server"
version = "0.7.3" version = "0.7.4"
dependencies = [ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"log", "log",
@ -1010,9 +1010,9 @@ dependencies = [
[[package]] [[package]]
name = "lsp-server" name = "lsp-server"
version = "0.7.3" version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72417faa455bfb4e5bf14b157d8e2ca2ed74b4e89b8cf42ea2d864825ae5c8a2" checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
dependencies = [ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"log", "log",
@ -1149,20 +1149,21 @@ checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451"
[[package]] [[package]]
name = "notify" name = "notify"
version = "5.1.0" version = "6.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58ea850aa68a06e48fdb069c0ec44d0d64c8dbffa49bf3b6f7f0a901fdea1ba9" checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d"
dependencies = [ dependencies = [
"bitflags 1.3.2", "bitflags 2.3.2",
"crossbeam-channel", "crossbeam-channel",
"filetime", "filetime",
"fsevent-sys", "fsevent-sys",
"inotify", "inotify",
"kqueue", "kqueue",
"libc", "libc",
"log",
"mio", "mio",
"walkdir", "walkdir",
"windows-sys 0.42.0", "windows-sys 0.48.0",
] ]
[[package]] [[package]]
@ -1251,7 +1252,7 @@ dependencies = [
"cfg-if", "cfg-if",
"instant", "instant",
"libc", "libc",
"redox_syscall", "redox_syscall 0.2.16",
"smallvec", "smallvec",
"winapi", "winapi",
] ]
@ -1264,7 +1265,7 @@ checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"redox_syscall", "redox_syscall 0.2.16",
"smallvec", "smallvec",
"windows-sys 0.42.0", "windows-sys 0.42.0",
] ]
@ -1482,15 +1483,35 @@ dependencies = [
] ]
[[package]] [[package]]
name = "ra-ap-rustc_lexer" name = "ra-ap-rustc_index"
version = "0.1.0" version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1c145702ed3f237918e512685185dc8a4d0edc3a5326c63d20361d8ba9b45b3" checksum = "07b5fa61d34da18e148dc3a81f654488ea07f40938d8aefb17f8b64bb78c6120"
dependencies = [ dependencies = [
"unic-emoji-char", "arrayvec",
"smallvec",
]
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2e2f6b48422e4eed5218277ab7cc9733e60dd8f3167f4f36a49a0cafe4dc195"
dependencies = [
"unicode-properties",
"unicode-xid", "unicode-xid",
] ]
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3c7369ad01cc79f9e3513c9f6a6326f6b980100e4862a7ac71b9991c88108bb"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
]
[[package]] [[package]]
name = "rayon" name = "rayon"
version = "1.7.0" version = "1.7.0"
@ -1522,6 +1543,15 @@ dependencies = [
"bitflags 1.3.2", "bitflags 1.3.2",
] ]
[[package]]
name = "redox_syscall"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
dependencies = [
"bitflags 1.3.2",
]
[[package]] [[package]]
name = "rowan" name = "rowan"
version = "0.15.11" version = "0.15.11"
@ -1545,7 +1575,6 @@ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"dissimilar", "dissimilar",
"expect-test", "expect-test",
"filetime",
"flycheck", "flycheck",
"hir", "hir",
"hir-def", "hir-def",
@ -1555,7 +1584,7 @@ dependencies = [
"ide-ssr", "ide-ssr",
"itertools", "itertools",
"load-cargo", "load-cargo",
"lsp-server 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-server 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-types", "lsp-types",
"mbe", "mbe",
"mimalloc", "mimalloc",
@ -2056,47 +2085,6 @@ version = "1.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f" checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f"
[[package]]
name = "unic-char-property"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8c57a407d9b6fa02b4795eb81c5b6652060a15a7903ea981f3d723e6c0be221"
dependencies = [
"unic-char-range",
]
[[package]]
name = "unic-char-range"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0398022d5f700414f6b899e10b8348231abf9173fa93144cbc1a43b9793c1fbc"
[[package]]
name = "unic-common"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc"
[[package]]
name = "unic-emoji-char"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b07221e68897210270a38bde4babb655869637af0f69407f96053a34f76494d"
dependencies = [
"unic-char-property",
"unic-char-range",
"unic-ucd-version",
]
[[package]]
name = "unic-ucd-version"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96bd2f2237fe450fcd0a1d2f5f4e91711124f7857ba2e964247776ebeeb7b0c4"
dependencies = [
"unic-common",
]
[[package]] [[package]]
name = "unicase" name = "unicase"
version = "2.6.0" version = "2.6.0"
@ -2127,6 +2115,12 @@ dependencies = [
"tinyvec", "tinyvec",
] ]
[[package]]
name = "unicode-properties"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7f91c8b21fbbaa18853c3d0801c78f4fc94cdb976699bb03e832e75f7fd22f0"
[[package]] [[package]]
name = "unicode-segmentation" name = "unicode-segmentation"
version = "1.10.1" version = "1.10.1"

View file

@ -86,7 +86,7 @@ proc-macro-test = { path = "./crates/proc-macro-test" }
# In-tree crates that are published separately and follow semver. See lib/README.md # In-tree crates that are published separately and follow semver. See lib/README.md
line-index = { version = "0.1.0-pre.1" } line-index = { version = "0.1.0-pre.1" }
la-arena = { version = "0.3.1" } la-arena = { version = "0.3.1" }
lsp-server = { version = "0.7.3" } lsp-server = { version = "0.7.4" }
# non-local crates # non-local crates
smallvec = { version = "1.10.0", features = [ smallvec = { version = "1.10.0", features = [
@ -97,11 +97,15 @@ smallvec = { version = "1.10.0", features = [
smol_str = "0.2.0" smol_str = "0.2.0"
nohash-hasher = "0.2.0" nohash-hasher = "0.2.0"
text-size = "1.1.0" text-size = "1.1.0"
# See https://github.com/serde-rs/serde/issues/2538#issuecomment-1684517372 for why we pin serde serde = { version = "1.0.156", features = ["derive"] }
serde = { version = "1.0.156, < 1.0.172", features = ["derive"] }
serde_json = "1.0.96" serde_json = "1.0.96"
triomphe = { version = "0.1.8", default-features = false, features = ["std"] } triomphe = { version = "0.1.8", default-features = false, features = ["std"] }
# can't upgrade due to dashmap depending on 0.12.3 currently # can't upgrade due to dashmap depending on 0.12.3 currently
hashbrown = { version = "0.12.3", features = ["inline-more"], default-features = false } hashbrown = { version = "0.12.3", features = ["inline-more"], default-features = false }
rustc_lexer = { version = "0.1.0", package = "ra-ap-rustc_lexer" } rustc_lexer = { version = "0.10.0", package = "ra-ap-rustc_lexer" }
rustc_parse_format = { version = "0.10.0", package = "ra-ap-rustc_parse_format", default-features = false }
# Upstream broke this for us so we can't update it
rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false }
rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false }

View file

@ -179,8 +179,8 @@ impl ChangeFixture {
meta.edition, meta.edition,
Some(crate_name.clone().into()), Some(crate_name.clone().into()),
version, version,
meta.cfg, meta.cfg.clone(),
Default::default(), Some(meta.cfg),
meta.env, meta.env,
false, false,
origin, origin,
@ -200,7 +200,7 @@ impl ChangeFixture {
} else if meta.path == "/main.rs" || meta.path == "/lib.rs" { } else if meta.path == "/main.rs" || meta.path == "/lib.rs" {
assert!(default_crate_root.is_none()); assert!(default_crate_root.is_none());
default_crate_root = Some(file_id); default_crate_root = Some(file_id);
default_cfg = meta.cfg; default_cfg.extend(meta.cfg.into_iter());
default_env.extend(meta.env.iter().map(|(x, y)| (x.to_owned(), y.to_owned()))); default_env.extend(meta.env.iter().map(|(x, y)| (x.to_owned(), y.to_owned())));
default_target_data_layout = meta.target_data_layout; default_target_data_layout = meta.target_data_layout;
} }
@ -220,8 +220,8 @@ impl ChangeFixture {
Edition::CURRENT, Edition::CURRENT,
Some(CrateName::new("test").unwrap().into()), Some(CrateName::new("test").unwrap().into()),
None, None,
default_cfg, default_cfg.clone(),
Default::default(), Some(default_cfg),
default_env, default_env,
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },

View file

@ -86,6 +86,32 @@ impl CfgOptions {
} }
} }
impl Extend<CfgAtom> for CfgOptions {
fn extend<T: IntoIterator<Item = CfgAtom>>(&mut self, iter: T) {
iter.into_iter().for_each(|cfg_flag| _ = self.enabled.insert(cfg_flag));
}
}
impl IntoIterator for CfgOptions {
type Item = <FxHashSet<CfgAtom> as IntoIterator>::Item;
type IntoIter = <FxHashSet<CfgAtom> as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
<FxHashSet<CfgAtom> as IntoIterator>::into_iter(self.enabled)
}
}
impl<'a> IntoIterator for &'a CfgOptions {
type Item = <&'a FxHashSet<CfgAtom> as IntoIterator>::Item;
type IntoIter = <&'a FxHashSet<CfgAtom> as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
<&FxHashSet<CfgAtom> as IntoIterator>::into_iter(&self.enabled)
}
}
#[derive(Default, Clone, Debug, PartialEq, Eq)] #[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct CfgDiff { pub struct CfgDiff {
// Invariants: No duplicates, no atom that's both in `enable` and `disable`. // Invariants: No duplicates, no atom that's both in `enable` and `disable`.

View file

@ -5,7 +5,9 @@
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
use std::{ use std::{
ffi::OsString,
fmt, io, fmt, io,
path::PathBuf,
process::{ChildStderr, ChildStdout, Command, Stdio}, process::{ChildStderr, ChildStdout, Command, Stdio},
time::Duration, time::Duration,
}; };
@ -168,7 +170,7 @@ struct FlycheckActor {
/// doesn't provide a way to read sub-process output without blocking, so we /// doesn't provide a way to read sub-process output without blocking, so we
/// have to wrap sub-processes output handling in a thread and pass messages /// have to wrap sub-processes output handling in a thread and pass messages
/// back over a channel. /// back over a channel.
cargo_handle: Option<CargoHandle>, command_handle: Option<CommandHandle>,
} }
enum Event { enum Event {
@ -184,7 +186,7 @@ impl FlycheckActor {
workspace_root: AbsPathBuf, workspace_root: AbsPathBuf,
) -> FlycheckActor { ) -> FlycheckActor {
tracing::info!(%id, ?workspace_root, "Spawning flycheck"); tracing::info!(%id, ?workspace_root, "Spawning flycheck");
FlycheckActor { id, sender, config, root: workspace_root, cargo_handle: None } FlycheckActor { id, sender, config, root: workspace_root, command_handle: None }
} }
fn report_progress(&self, progress: Progress) { fn report_progress(&self, progress: Progress) {
@ -192,7 +194,7 @@ impl FlycheckActor {
} }
fn next_event(&self, inbox: &Receiver<StateChange>) -> Option<Event> { fn next_event(&self, inbox: &Receiver<StateChange>) -> Option<Event> {
let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver); let check_chan = self.command_handle.as_ref().map(|cargo| &cargo.receiver);
if let Ok(msg) = inbox.try_recv() { if let Ok(msg) = inbox.try_recv() {
// give restarts a preference so check outputs don't block a restart or stop // give restarts a preference so check outputs don't block a restart or stop
return Some(Event::RequestStateChange(msg)); return Some(Event::RequestStateChange(msg));
@ -221,21 +223,19 @@ impl FlycheckActor {
} }
let command = self.check_command(); let command = self.check_command();
let formatted_command = format!("{:?}", command);
tracing::debug!(?command, "will restart flycheck"); tracing::debug!(?command, "will restart flycheck");
match CargoHandle::spawn(command) { match CommandHandle::spawn(command) {
Ok(cargo_handle) => { Ok(command_handle) => {
tracing::debug!( tracing::debug!(command = formatted_command, "did restart flycheck");
command = ?self.check_command(), self.command_handle = Some(command_handle);
"did restart flycheck"
);
self.cargo_handle = Some(cargo_handle);
self.report_progress(Progress::DidStart); self.report_progress(Progress::DidStart);
} }
Err(error) => { Err(error) => {
self.report_progress(Progress::DidFailToRestart(format!( self.report_progress(Progress::DidFailToRestart(format!(
"Failed to run the following command: {:?} error={}", "Failed to run the following command: {} error={}",
self.check_command(), formatted_command, error
error
))); )));
} }
} }
@ -244,12 +244,14 @@ impl FlycheckActor {
tracing::debug!(flycheck_id = self.id, "flycheck finished"); tracing::debug!(flycheck_id = self.id, "flycheck finished");
// Watcher finished // Watcher finished
let cargo_handle = self.cargo_handle.take().unwrap(); let command_handle = self.command_handle.take().unwrap();
let res = cargo_handle.join(); let formatted_handle = format!("{:?}", command_handle);
let res = command_handle.join();
if res.is_err() { if res.is_err() {
tracing::error!( tracing::error!(
"Flycheck failed to run the following command: {:?}", "Flycheck failed to run the following command: {}",
self.check_command() formatted_handle
); );
} }
self.report_progress(Progress::DidFinish(res)); self.report_progress(Progress::DidFinish(res));
@ -284,12 +286,12 @@ impl FlycheckActor {
} }
fn cancel_check_process(&mut self) { fn cancel_check_process(&mut self) {
if let Some(cargo_handle) = self.cargo_handle.take() { if let Some(command_handle) = self.command_handle.take() {
tracing::debug!( tracing::debug!(
command = ?self.check_command(), command = ?command_handle,
"did cancel flycheck" "did cancel flycheck"
); );
cargo_handle.cancel(); command_handle.cancel();
self.report_progress(Progress::DidCancel); self.report_progress(Progress::DidCancel);
} }
} }
@ -391,19 +393,36 @@ impl Drop for JodGroupChild {
} }
/// A handle to a cargo process used for fly-checking. /// A handle to a cargo process used for fly-checking.
struct CargoHandle { struct CommandHandle {
/// The handle to the actual cargo process. As we cannot cancel directly from with /// The handle to the actual cargo process. As we cannot cancel directly from with
/// a read syscall dropping and therefore terminating the process is our best option. /// a read syscall dropping and therefore terminating the process is our best option.
child: JodGroupChild, child: JodGroupChild,
thread: stdx::thread::JoinHandle<io::Result<(bool, String)>>, thread: stdx::thread::JoinHandle<io::Result<(bool, String)>>,
receiver: Receiver<CargoMessage>, receiver: Receiver<CargoMessage>,
program: OsString,
arguments: Vec<OsString>,
current_dir: Option<PathBuf>,
} }
impl CargoHandle { impl fmt::Debug for CommandHandle {
fn spawn(mut command: Command) -> std::io::Result<CargoHandle> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("CommandHandle")
.field("program", &self.program)
.field("arguments", &self.arguments)
.field("current_dir", &self.current_dir)
.finish()
}
}
impl CommandHandle {
fn spawn(mut command: Command) -> std::io::Result<CommandHandle> {
command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null()); command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
let mut child = command.group_spawn().map(JodGroupChild)?; let mut child = command.group_spawn().map(JodGroupChild)?;
let program = command.get_program().into();
let arguments = command.get_args().map(|arg| arg.into()).collect::<Vec<OsString>>();
let current_dir = command.get_current_dir().map(|arg| arg.to_path_buf());
let stdout = child.0.inner().stdout.take().unwrap(); let stdout = child.0.inner().stdout.take().unwrap();
let stderr = child.0.inner().stderr.take().unwrap(); let stderr = child.0.inner().stderr.take().unwrap();
@ -413,7 +432,7 @@ impl CargoHandle {
.name("CargoHandle".to_owned()) .name("CargoHandle".to_owned())
.spawn(move || actor.run()) .spawn(move || actor.run())
.expect("failed to spawn thread"); .expect("failed to spawn thread");
Ok(CargoHandle { child, thread, receiver }) Ok(CommandHandle { program, arguments, current_dir, child, thread, receiver })
} }
fn cancel(mut self) { fn cancel(mut self) {

View file

@ -31,8 +31,10 @@ smallvec.workspace = true
hashbrown.workspace = true hashbrown.workspace = true
triomphe.workspace = true triomphe.workspace = true
rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false } rustc_abi.workspace = true
rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false } rustc_index.workspace = true
rustc_parse_format.workspace = true
# local deps # local deps
stdx.workspace = true stdx.workspace = true

View file

@ -5,7 +5,7 @@ pub mod builtin;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
use std::{hash::Hash, ops}; use std::{hash::Hash, ops, slice::Iter as SliceIter};
use base_db::CrateId; use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions}; use cfg::{CfgExpr, CfgOptions};
@ -14,12 +14,11 @@ use hir_expand::{
attrs::{collect_attrs, Attr, AttrId, RawAttrs}, attrs::{collect_attrs, Attr, AttrId, RawAttrs},
HirFileId, InFile, HirFileId, InFile,
}; };
use itertools::Itertools;
use la_arena::{ArenaMap, Idx, RawIdx}; use la_arena::{ArenaMap, Idx, RawIdx};
use mbe::DelimiterKind; use mbe::DelimiterKind;
use syntax::{ use syntax::{
ast::{self, HasAttrs, IsString}, ast::{self, HasAttrs},
AstPtr, AstToken, SmolStr, TextRange, TextSize, AstPtr, SmolStr,
}; };
use triomphe::Arc; use triomphe::Arc;
@ -33,26 +32,6 @@ use crate::{
LocalFieldId, Lookup, MacroId, VariantId, LocalFieldId, Lookup, MacroId, VariantId,
}; };
/// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Documentation(String);
impl Documentation {
pub fn new(s: String) -> Self {
Documentation(s)
}
pub fn as_str(&self) -> &str {
&self.0
}
}
impl From<Documentation> for String {
fn from(Documentation(string): Documentation) -> Self {
string
}
}
#[derive(Default, Debug, Clone, PartialEq, Eq)] #[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct Attrs(RawAttrs); pub struct Attrs(RawAttrs);
@ -221,33 +200,6 @@ impl Attrs {
self.by_key("lang").string_value().and_then(|it| LangItem::from_str(it)) self.by_key("lang").string_value().and_then(|it| LangItem::from_str(it))
} }
pub fn docs(&self) -> Option<Documentation> {
let docs = self.by_key("doc").attrs().filter_map(|attr| attr.string_value());
let indent = doc_indent(self);
let mut buf = String::new();
for doc in docs {
// str::lines doesn't yield anything for the empty string
if !doc.is_empty() {
buf.extend(Itertools::intersperse(
doc.lines().map(|line| {
line.char_indices()
.nth(indent)
.map_or(line, |(offset, _)| &line[offset..])
.trim_end()
}),
"\n",
));
}
buf.push('\n');
}
buf.pop();
if buf.is_empty() {
None
} else {
Some(Documentation(buf))
}
}
pub fn has_doc_hidden(&self) -> bool { pub fn has_doc_hidden(&self) -> bool {
self.by_key("doc").tt_values().any(|tt| { self.by_key("doc").tt_values().any(|tt| {
tt.delimiter.kind == DelimiterKind::Parenthesis && tt.delimiter.kind == DelimiterKind::Parenthesis &&
@ -299,7 +251,6 @@ impl Attrs {
} }
} }
use std::slice::Iter as SliceIter;
#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] #[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub enum DocAtom { pub enum DocAtom {
/// eg. `#[doc(hidden)]` /// eg. `#[doc(hidden)]`
@ -313,7 +264,6 @@ pub enum DocAtom {
// Adapted from `CfgExpr` parsing code // Adapted from `CfgExpr` parsing code
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
// #[cfg_attr(test, derive(derive_arbitrary::Arbitrary))]
pub enum DocExpr { pub enum DocExpr {
Invalid, Invalid,
/// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]` /// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]`
@ -574,62 +524,6 @@ impl AttrsWithOwner {
AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs)) AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
} }
pub fn docs_with_rangemap(
&self,
db: &dyn DefDatabase,
) -> Option<(Documentation, DocsRangeMap)> {
let docs =
self.by_key("doc").attrs().filter_map(|attr| attr.string_value().map(|s| (s, attr.id)));
let indent = doc_indent(self);
let mut buf = String::new();
let mut mapping = Vec::new();
for (doc, idx) in docs {
if !doc.is_empty() {
let mut base_offset = 0;
for raw_line in doc.split('\n') {
let line = raw_line.trim_end();
let line_len = line.len();
let (offset, line) = match line.char_indices().nth(indent) {
Some((offset, _)) => (offset, &line[offset..]),
None => (0, line),
};
let buf_offset = buf.len();
buf.push_str(line);
mapping.push((
TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
idx,
TextRange::at(
(base_offset + offset).try_into().ok()?,
line_len.try_into().ok()?,
),
));
buf.push('\n');
base_offset += raw_line.len() + 1;
}
} else {
buf.push('\n');
}
}
buf.pop();
if buf.is_empty() {
None
} else {
Some((Documentation(buf), DocsRangeMap { mapping, source_map: self.source_map(db) }))
}
}
}
fn doc_indent(attrs: &Attrs) -> usize {
attrs
.by_key("doc")
.attrs()
.filter_map(|attr| attr.string_value())
.flat_map(|s| s.lines())
.filter(|line| !line.chars().all(|c| c.is_whitespace()))
.map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
.min()
.unwrap_or(0)
} }
#[derive(Debug)] #[derive(Debug)]
@ -673,7 +567,7 @@ impl AttrSourceMap {
self.source_of_id(attr.id) self.source_of_id(attr.id)
} }
fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> { pub fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
let ast_idx = id.ast_index(); let ast_idx = id.ast_index();
let file_id = match self.mod_def_site_file_id { let file_id = match self.mod_def_site_file_id {
Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id, Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
@ -687,69 +581,6 @@ impl AttrSourceMap {
} }
} }
/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
#[derive(Debug)]
pub struct DocsRangeMap {
source_map: AttrSourceMap,
// (docstring-line-range, attr_index, attr-string-range)
// a mapping from the text range of a line of the [`Documentation`] to the attribute index and
// the original (untrimmed) syntax doc line
mapping: Vec<(TextRange, AttrId, TextRange)>,
}
impl DocsRangeMap {
/// Maps a [`TextRange`] relative to the documentation string back to its AST range
pub fn map(&self, range: TextRange) -> Option<InFile<TextRange>> {
let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
if !line_docs_range.contains_range(range) {
return None;
}
let relative_range = range - line_docs_range.start();
let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
match source {
Either::Left(attr) => {
let string = get_doc_string_in_attr(attr)?;
let text_range = string.open_quote_text_range()?;
let range = TextRange::at(
text_range.end() + original_line_src_range.start() + relative_range.start(),
string.syntax().text_range().len().min(range.len()),
);
Some(InFile { file_id, value: range })
}
Either::Right(comment) => {
let text_range = comment.syntax().text_range();
let range = TextRange::at(
text_range.start()
+ TextSize::try_from(comment.prefix().len()).ok()?
+ original_line_src_range.start()
+ relative_range.start(),
text_range.len().min(range.len()),
);
Some(InFile { file_id, value: range })
}
}
}
}
fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
match it.expr() {
// #[doc = lit]
Some(ast::Expr::Literal(lit)) => match lit.kind() {
ast::LiteralKind::String(it) => Some(it),
_ => None,
},
// #[cfg_attr(..., doc = "", ...)]
None => {
// FIXME: See highlight injection for what to do here
None
}
_ => None,
}
}
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub struct AttrQuery<'attr> { pub struct AttrQuery<'attr> {
attrs: &'attr Attrs, attrs: &'attr Attrs,

View file

@ -8,7 +8,8 @@
//! name resolution, and `BUILTIN_ATTRIBUTES` is almost entirely unchanged from the original, to //! name resolution, and `BUILTIN_ATTRIBUTES` is almost entirely unchanged from the original, to
//! ease updating. //! ease updating.
use once_cell::sync::OnceCell; use std::sync::OnceLock;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
/// Ignored attribute namespaces used by tools. /// Ignored attribute namespaces used by tools.
@ -29,7 +30,7 @@ pub struct AttributeTemplate {
} }
pub fn find_builtin_attr_idx(name: &str) -> Option<usize> { pub fn find_builtin_attr_idx(name: &str) -> Option<usize> {
static BUILTIN_LOOKUP_TABLE: OnceCell<FxHashMap<&'static str, usize>> = OnceCell::new(); static BUILTIN_LOOKUP_TABLE: OnceLock<FxHashMap<&'static str, usize>> = OnceLock::new();
BUILTIN_LOOKUP_TABLE BUILTIN_LOOKUP_TABLE
.get_or_init(|| { .get_or_init(|| {
INERT_ATTRIBUTES.iter().map(|attr| attr.name).enumerate().map(|(a, b)| (b, a)).collect() INERT_ATTRIBUTES.iter().map(|attr| attr.name).enumerate().map(|(a, b)| (b, a)).collect()

View file

@ -65,6 +65,8 @@ pub type LabelSource = InFile<LabelPtr>;
pub type FieldPtr = AstPtr<ast::RecordExprField>; pub type FieldPtr = AstPtr<ast::RecordExprField>;
pub type FieldSource = InFile<FieldPtr>; pub type FieldSource = InFile<FieldPtr>;
pub type PatFieldPtr = AstPtr<ast::RecordPatField>;
pub type PatFieldSource = InFile<PatFieldPtr>;
/// An item body together with the mapping from syntax nodes to HIR expression /// An item body together with the mapping from syntax nodes to HIR expression
/// IDs. This is needed to go from e.g. a position in a file to the HIR /// IDs. This is needed to go from e.g. a position in a file to the HIR
@ -90,8 +92,8 @@ pub struct BodySourceMap {
/// We don't create explicit nodes for record fields (`S { record_field: 92 }`). /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
/// Instead, we use id of expression (`92`) to identify the field. /// Instead, we use id of expression (`92`) to identify the field.
field_map: FxHashMap<FieldSource, ExprId>,
field_map_back: FxHashMap<ExprId, FieldSource>, field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>, expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
@ -164,9 +166,10 @@ impl Body {
}; };
let module = def.module(db); let module = def.module(db);
let expander = Expander::new(db, file_id, module); let expander = Expander::new(db, file_id, module);
let (mut body, source_map) = let (mut body, mut source_map) =
Body::new(db, def, expander, params, body, module.krate, is_async_fn); Body::new(db, def, expander, params, body, module.krate, is_async_fn);
body.shrink_to_fit(); body.shrink_to_fit();
source_map.shrink_to_fit();
(Arc::new(body), Arc::new(source_map)) (Arc::new(body), Arc::new(source_map))
} }
@ -375,9 +378,8 @@ impl BodySourceMap {
self.field_map_back[&expr].clone() self.field_map_back[&expr].clone()
} }
pub fn node_field(&self, node: InFile<&ast::RecordExprField>) -> Option<ExprId> { pub fn pat_field_syntax(&self, pat: PatId) -> PatFieldSource {
let src = node.map(AstPtr::new); self.pat_field_map_back[&pat].clone()
self.field_map.get(&src).cloned()
} }
pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprId> { pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprId> {
@ -389,4 +391,29 @@ impl BodySourceMap {
pub fn diagnostics(&self) -> &[BodyDiagnostic] { pub fn diagnostics(&self) -> &[BodyDiagnostic] {
&self.diagnostics &self.diagnostics
} }
fn shrink_to_fit(&mut self) {
let Self {
expr_map,
expr_map_back,
pat_map,
pat_map_back,
label_map,
label_map_back,
field_map_back,
pat_field_map_back,
expansions,
diagnostics,
} = self;
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
pat_map.shrink_to_fit();
pat_map_back.shrink_to_fit();
label_map.shrink_to_fit();
label_map_back.shrink_to_fit();
field_map_back.shrink_to_fit();
pat_field_map_back.shrink_to_fit();
expansions.shrink_to_fit();
diagnostics.shrink_to_fit();
}
} }

View file

@ -25,13 +25,20 @@ use triomphe::Arc;
use crate::{ use crate::{
body::{Body, BodyDiagnostic, BodySourceMap, ExprPtr, LabelPtr, PatPtr}, body::{Body, BodyDiagnostic, BodySourceMap, ExprPtr, LabelPtr, PatPtr},
builtin_type::BuiltinUint,
data::adt::StructKind, data::adt::StructKind,
db::DefDatabase, db::DefDatabase,
expander::Expander, expander::Expander,
hir::{ hir::{
dummy_expr_id, Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, dummy_expr_id,
ClosureKind, Expr, ExprId, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability, format_args::{
Pat, PatId, RecordFieldPat, RecordLitField, Statement, self, FormatAlignment, FormatArgs, FormatArgsPiece, FormatArgument, FormatArgumentKind,
FormatArgumentsCollector, FormatCount, FormatDebugHex, FormatOptions,
FormatPlaceholder, FormatSign, FormatTrait,
},
Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind,
Expr, ExprId, InlineAsm, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability,
OffsetOf, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
}, },
item_scope::BuiltinShadowMode, item_scope::BuiltinShadowMode,
lang_item::LangItem, lang_item::LangItem,
@ -42,6 +49,8 @@ use crate::{
AdtId, BlockId, BlockLoc, ConstBlockLoc, DefWithBodyId, ModuleDefId, UnresolvedMacro, AdtId, BlockId, BlockLoc, ConstBlockLoc, DefWithBodyId, ModuleDefId, UnresolvedMacro,
}; };
type FxIndexSet<K> = indexmap::IndexSet<K, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
pub(super) fn lower( pub(super) fn lower(
db: &dyn DefDatabase, db: &dyn DefDatabase,
owner: DefWithBodyId, owner: DefWithBodyId,
@ -437,7 +446,6 @@ impl ExprCollector<'_> {
None => self.missing_expr(), None => self.missing_expr(),
}; };
let src = self.expander.to_source(AstPtr::new(&field)); let src = self.expander.to_source(AstPtr::new(&field));
self.source_map.field_map.insert(src.clone(), expr);
self.source_map.field_map_back.insert(expr, src); self.source_map.field_map_back.insert(expr, src);
Some(RecordLitField { name, expr }) Some(RecordLitField { name, expr })
}) })
@ -579,11 +587,6 @@ impl ExprCollector<'_> {
syntax_ptr, syntax_ptr,
) )
} }
ast::Expr::BoxExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
self.alloc_expr(Expr::Box { expr }, syntax_ptr)
}
ast::Expr::ArrayExpr(e) => { ast::Expr::ArrayExpr(e) => {
let kind = e.kind(); let kind = e.kind();
@ -653,6 +656,16 @@ impl ExprCollector<'_> {
} }
} }
ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr), ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr),
ast::Expr::AsmExpr(e) => {
let e = self.collect_expr_opt(e.expr());
self.alloc_expr(Expr::InlineAsm(InlineAsm { e }), syntax_ptr)
}
ast::Expr::OffsetOfExpr(e) => {
let container = Interned::new(TypeRef::from_ast_opt(&self.ctx(), e.ty()));
let fields = e.fields().map(|it| it.as_name()).collect();
self.alloc_expr(Expr::OffsetOf(OffsetOf { container, fields }), syntax_ptr)
}
ast::Expr::FormatArgsExpr(f) => self.collect_format_args(f, syntax_ptr),
}) })
} }
@ -663,6 +676,7 @@ impl ExprCollector<'_> {
let result_expr_id = self.alloc_expr(Expr::Missing, syntax_ptr); let result_expr_id = self.alloc_expr(Expr::Missing, syntax_ptr);
let prev_binding_owner = self.current_binding_owner.take(); let prev_binding_owner = self.current_binding_owner.take();
self.current_binding_owner = Some(result_expr_id); self.current_binding_owner = Some(result_expr_id);
(result_expr_id, prev_binding_owner) (result_expr_id, prev_binding_owner)
} }
@ -744,7 +758,27 @@ impl ExprCollector<'_> {
fn collect_while_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::WhileExpr) -> ExprId { fn collect_while_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::WhileExpr) -> ExprId {
let label = e.label().map(|label| self.collect_label(label)); let label = e.label().map(|label| self.collect_label(label));
let body = self.collect_labelled_block_opt(label, e.loop_body()); let body = self.collect_labelled_block_opt(label, e.loop_body());
let condition = self.collect_expr_opt(e.condition());
// Labels can also be used in the condition expression, like this:
// ```
// fn main() {
// let mut optional = Some(0);
// 'my_label: while let Some(a) = match optional {
// None => break 'my_label,
// Some(val) => Some(val),
// } {
// println!("{}", a);
// optional = None;
// }
// }
// ```
let condition = match label {
Some(label) => {
self.with_labeled_rib(label, |this| this.collect_expr_opt(e.condition()))
}
None => self.collect_expr_opt(e.condition()),
};
let break_expr = let break_expr =
self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr.clone()); self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr.clone());
let if_expr = self.alloc_expr( let if_expr = self.alloc_expr(
@ -1295,23 +1329,21 @@ impl ExprCollector<'_> {
ast::Pat::RecordPat(p) => { ast::Pat::RecordPat(p) => {
let path = let path =
p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
let args = p let record_pat_field_list =
.record_pat_field_list() &p.record_pat_field_list().expect("every struct should have a field list");
.expect("every struct should have a field list") let args = record_pat_field_list
.fields() .fields()
.filter_map(|f| { .filter_map(|f| {
let ast_pat = f.pat()?; let ast_pat = f.pat()?;
let pat = self.collect_pat(ast_pat, binding_list); let pat = self.collect_pat(ast_pat, binding_list);
let name = f.field_name()?.as_name(); let name = f.field_name()?.as_name();
let src = self.expander.to_source(AstPtr::new(&f));
self.source_map.pat_field_map_back.insert(pat, src);
Some(RecordFieldPat { name, pat }) Some(RecordFieldPat { name, pat })
}) })
.collect(); .collect();
let ellipsis = p let ellipsis = record_pat_field_list.rest_pat().is_some();
.record_pat_field_list()
.expect("every struct should have a field list")
.rest_pat()
.is_some();
Pat::Record { path, args, ellipsis } Pat::Record { path, args, ellipsis }
} }
@ -1531,6 +1563,401 @@ impl ExprCollector<'_> {
} }
} }
// endregion: labels // endregion: labels
// region: format
fn expand_macros_to_string(&mut self, expr: ast::Expr) -> Option<(ast::String, bool)> {
let m = match expr {
ast::Expr::MacroExpr(m) => m,
ast::Expr::Literal(l) => {
return match l.kind() {
ast::LiteralKind::String(s) => Some((s, true)),
_ => None,
}
}
_ => return None,
};
let e = m.macro_call()?;
let macro_ptr = AstPtr::new(&e);
let (exp, _) = self.collect_macro_call(e, macro_ptr, true, |this, expansion| {
expansion.and_then(|it| this.expand_macros_to_string(it))
})?;
Some((exp, false))
}
fn collect_format_args(
&mut self,
f: ast::FormatArgsExpr,
syntax_ptr: AstPtr<ast::Expr>,
) -> ExprId {
let mut args = FormatArgumentsCollector::new();
f.args().for_each(|arg| {
args.add(FormatArgument {
kind: match arg.name() {
Some(name) => FormatArgumentKind::Named(name.as_name()),
None => FormatArgumentKind::Normal,
},
expr: self.collect_expr_opt(arg.expr()),
});
});
let template = f.template();
let fmt_snippet = template.as_ref().map(ToString::to_string);
let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) {
Some((s, is_direct_literal)) => {
format_args::parse(&s, fmt_snippet, args, is_direct_literal, |name| {
self.alloc_expr_desugared(Expr::Path(Path::from(name)))
})
}
None => FormatArgs { template: Default::default(), arguments: args.finish() },
};
// Create a list of all _unique_ (argument, format trait) combinations.
// E.g. "{0} {0:x} {0} {1}" -> [(0, Display), (0, LowerHex), (1, Display)]
let mut argmap = FxIndexSet::default();
for piece in fmt.template.iter() {
let FormatArgsPiece::Placeholder(placeholder) = piece else { continue };
if let Ok(index) = placeholder.argument.index {
argmap.insert((index, ArgumentType::Format(placeholder.format_trait)));
}
}
let lit_pieces =
fmt.template
.iter()
.enumerate()
.filter_map(|(i, piece)| {
match piece {
FormatArgsPiece::Literal(s) => Some(
self.alloc_expr_desugared(Expr::Literal(Literal::String(s.clone()))),
),
&FormatArgsPiece::Placeholder(_) => {
// Inject empty string before placeholders when not already preceded by a literal piece.
if i == 0
|| matches!(fmt.template[i - 1], FormatArgsPiece::Placeholder(_))
{
Some(self.alloc_expr_desugared(Expr::Literal(Literal::String(
"".into(),
))))
} else {
None
}
}
}
})
.collect();
let lit_pieces = self.alloc_expr_desugared(Expr::Array(Array::ElementList {
elements: lit_pieces,
is_assignee_expr: false,
}));
let lit_pieces = self.alloc_expr_desugared(Expr::Ref {
expr: lit_pieces,
rawness: Rawness::Ref,
mutability: Mutability::Shared,
});
let format_options = {
// Generate:
// &[format_spec_0, format_spec_1, format_spec_2]
let elements = fmt
.template
.iter()
.filter_map(|piece| {
let FormatArgsPiece::Placeholder(placeholder) = piece else { return None };
Some(self.make_format_spec(placeholder, &mut argmap))
})
.collect();
let array = self.alloc_expr_desugared(Expr::Array(Array::ElementList {
elements,
is_assignee_expr: false,
}));
self.alloc_expr_desugared(Expr::Ref {
expr: array,
rawness: Rawness::Ref,
mutability: Mutability::Shared,
})
};
let arguments = &*fmt.arguments.arguments;
let args = if arguments.is_empty() {
let expr = self.alloc_expr_desugared(Expr::Array(Array::ElementList {
elements: Box::default(),
is_assignee_expr: false,
}));
self.alloc_expr_desugared(Expr::Ref {
expr,
rawness: Rawness::Ref,
mutability: Mutability::Shared,
})
} else {
// Generate:
// &match (&arg0, &arg1, &…) {
// args => [
// <core::fmt::Argument>::new_display(args.0),
// <core::fmt::Argument>::new_lower_hex(args.1),
// <core::fmt::Argument>::new_debug(args.0),
// …
// ]
// }
let args = argmap
.iter()
.map(|&(arg_index, ty)| {
let arg = self.alloc_expr_desugared(Expr::Ref {
expr: arguments[arg_index].expr,
rawness: Rawness::Ref,
mutability: Mutability::Shared,
});
self.make_argument(arg, ty)
})
.collect();
let array = self.alloc_expr_desugared(Expr::Array(Array::ElementList {
elements: args,
is_assignee_expr: false,
}));
self.alloc_expr_desugared(Expr::Ref {
expr: array,
rawness: Rawness::Ref,
mutability: Mutability::Shared,
})
};
// Generate:
// <core::fmt::Arguments>::new_v1_formatted(
// lit_pieces,
// args,
// format_options,
// unsafe { ::core::fmt::UnsafeArg::new() }
// )
let Some(new_v1_formatted) =
LangItem::FormatArguments.ty_rel_path(self.db, self.krate, name![new_v1_formatted])
else {
return self.missing_expr();
};
let Some(unsafe_arg_new) =
LangItem::FormatUnsafeArg.ty_rel_path(self.db, self.krate, name![new])
else {
return self.missing_expr();
};
let new_v1_formatted = self.alloc_expr_desugared(Expr::Path(new_v1_formatted));
let unsafe_arg_new = self.alloc_expr_desugared(Expr::Path(unsafe_arg_new));
let unsafe_arg_new = self.alloc_expr_desugared(Expr::Call {
callee: unsafe_arg_new,
args: Box::default(),
is_assignee_expr: false,
});
let unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe {
id: None,
statements: Box::default(),
tail: Some(unsafe_arg_new),
});
self.alloc_expr(
Expr::Call {
callee: new_v1_formatted,
args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]),
is_assignee_expr: false,
},
syntax_ptr,
)
}
/// Generate a hir expression for a format_args placeholder specification.
///
/// Generates
///
/// ```text
/// <core::fmt::rt::Placeholder::new(
/// …usize, // position
/// '…', // fill
/// <core::fmt::rt::Alignment>::…, // alignment
/// …u32, // flags
/// <core::fmt::rt::Count::…>, // width
/// <core::fmt::rt::Count::…>, // precision
/// )
/// ```
fn make_format_spec(
&mut self,
placeholder: &FormatPlaceholder,
argmap: &mut FxIndexSet<(usize, ArgumentType)>,
) -> ExprId {
let position = match placeholder.argument.index {
Ok(arg_index) => {
let (i, _) =
argmap.insert_full((arg_index, ArgumentType::Format(placeholder.format_trait)));
self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
i as u128,
Some(BuiltinUint::Usize),
)))
}
Err(_) => self.missing_expr(),
};
let &FormatOptions {
ref width,
ref precision,
alignment,
fill,
sign,
alternate,
zero_pad,
debug_hex,
} = &placeholder.format_options;
let fill = self.alloc_expr_desugared(Expr::Literal(Literal::Char(fill.unwrap_or(' '))));
let align = {
let align = LangItem::FormatAlignment.ty_rel_path(
self.db,
self.krate,
match alignment {
Some(FormatAlignment::Left) => name![Left],
Some(FormatAlignment::Right) => name![Right],
Some(FormatAlignment::Center) => name![Center],
None => name![Unknown],
},
);
match align {
Some(path) => self.alloc_expr_desugared(Expr::Path(path)),
None => self.missing_expr(),
}
};
// This needs to match `Flag` in library/core/src/fmt/rt.rs.
let flags: u32 = ((sign == Some(FormatSign::Plus)) as u32)
| ((sign == Some(FormatSign::Minus)) as u32) << 1
| (alternate as u32) << 2
| (zero_pad as u32) << 3
| ((debug_hex == Some(FormatDebugHex::Lower)) as u32) << 4
| ((debug_hex == Some(FormatDebugHex::Upper)) as u32) << 5;
let flags = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
flags as u128,
Some(BuiltinUint::U32),
)));
let precision = self.make_count(&precision, argmap);
let width = self.make_count(&width, argmap);
let format_placeholder_new = {
let format_placeholder_new =
LangItem::FormatPlaceholder.ty_rel_path(self.db, self.krate, name![new]);
match format_placeholder_new {
Some(path) => self.alloc_expr_desugared(Expr::Path(path)),
None => self.missing_expr(),
}
};
self.alloc_expr_desugared(Expr::Call {
callee: format_placeholder_new,
args: Box::new([position, fill, align, flags, precision, width]),
is_assignee_expr: false,
})
}
/// Generate a hir expression for a format_args Count.
///
/// Generates:
///
/// ```text
/// <core::fmt::rt::Count>::Is(…)
/// ```
///
/// or
///
/// ```text
/// <core::fmt::rt::Count>::Param(…)
/// ```
///
/// or
///
/// ```text
/// <core::fmt::rt::Count>::Implied
/// ```
fn make_count(
&mut self,
count: &Option<FormatCount>,
argmap: &mut FxIndexSet<(usize, ArgumentType)>,
) -> ExprId {
match count {
Some(FormatCount::Literal(n)) => {
match LangItem::FormatCount.ty_rel_path(self.db, self.krate, name![Is]) {
Some(count_is) => {
let count_is = self.alloc_expr_desugared(Expr::Path(count_is));
let args = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
*n as u128,
Some(BuiltinUint::Usize),
)));
self.alloc_expr_desugared(Expr::Call {
callee: count_is,
args: Box::new([args]),
is_assignee_expr: false,
})
}
None => self.missing_expr(),
}
}
Some(FormatCount::Argument(arg)) => {
if let Ok(arg_index) = arg.index {
let (i, _) = argmap.insert_full((arg_index, ArgumentType::Usize));
match LangItem::FormatCount.ty_rel_path(self.db, self.krate, name![Param]) {
Some(count_param) => {
let count_param = self.alloc_expr_desugared(Expr::Path(count_param));
let args = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
i as u128,
Some(BuiltinUint::Usize),
)));
self.alloc_expr_desugared(Expr::Call {
callee: count_param,
args: Box::new([args]),
is_assignee_expr: false,
})
}
None => self.missing_expr(),
}
} else {
self.missing_expr()
}
}
None => match LangItem::FormatCount.ty_rel_path(self.db, self.krate, name![Implied]) {
Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)),
None => self.missing_expr(),
},
}
}
/// Generate a hir expression representing an argument to a format_args invocation.
///
/// Generates:
///
/// ```text
/// <core::fmt::Argument>::new_…(arg)
/// ```
fn make_argument(&mut self, arg: ExprId, ty: ArgumentType) -> ExprId {
use ArgumentType::*;
use FormatTrait::*;
match LangItem::FormatArgument.ty_rel_path(
self.db,
self.krate,
match ty {
Format(Display) => name![new_display],
Format(Debug) => name![new_debug],
Format(LowerExp) => name![new_lower_exp],
Format(UpperExp) => name![new_upper_exp],
Format(Octal) => name![new_octal],
Format(Pointer) => name![new_pointer],
Format(Binary) => name![new_binary],
Format(LowerHex) => name![new_lower_hex],
Format(UpperHex) => name![new_upper_hex],
Usize => name![from_usize],
},
) {
Some(new_fn) => {
let new_fn = self.alloc_expr_desugared(Expr::Path(new_fn));
self.alloc_expr_desugared(Expr::Call {
callee: new_fn,
args: Box::new([arg]),
is_assignee_expr: false,
})
}
None => self.missing_expr(),
}
}
// endregion: format
} }
fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)> { fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)> {
@ -1606,3 +2033,9 @@ fn comma_follows_token(t: Option<syntax::SyntaxToken>) -> bool {
(|| syntax::algo::skip_trivia_token(t?.next_token()?, syntax::Direction::Next))() (|| syntax::algo::skip_trivia_token(t?.next_token()?, syntax::Direction::Next))()
.map_or(false, |it| it.kind() == syntax::T![,]) .map_or(false, |it| it.kind() == syntax::T![,])
} }
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
enum ArgumentType {
Format(FormatTrait),
Usize,
}

View file

@ -2,7 +2,7 @@
use std::fmt::{self, Write}; use std::fmt::{self, Write};
use hir_expand::db::ExpandDatabase; use itertools::Itertools;
use syntax::ast::HasName; use syntax::ast::HasName;
use crate::{ use crate::{
@ -51,8 +51,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
} }
}; };
let mut p = let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false };
Printer { db: db.upcast(), body, buf: header, indent_level: 0, needs_indent: false };
if let DefWithBodyId::FunctionId(it) = owner { if let DefWithBodyId::FunctionId(it) = owner {
p.buf.push('('); p.buf.push('(');
body.params.iter().zip(&db.function_data(it).params).for_each(|(&param, ty)| { body.params.iter().zip(&db.function_data(it).params).for_each(|(&param, ty)| {
@ -76,8 +75,7 @@ pub(super) fn print_expr_hir(
_owner: DefWithBodyId, _owner: DefWithBodyId,
expr: ExprId, expr: ExprId,
) -> String { ) -> String {
let mut p = let mut p = Printer { db, body, buf: String::new(), indent_level: 0, needs_indent: false };
Printer { db: db.upcast(), body, buf: String::new(), indent_level: 0, needs_indent: false };
p.print_expr(expr); p.print_expr(expr);
p.buf p.buf
} }
@ -98,7 +96,7 @@ macro_rules! wln {
} }
struct Printer<'a> { struct Printer<'a> {
db: &'a dyn ExpandDatabase, db: &'a dyn DefDatabase,
body: &'a Body, body: &'a Body,
buf: String, buf: String,
indent_level: usize, indent_level: usize,
@ -142,9 +140,14 @@ impl Printer<'_> {
} }
fn newline(&mut self) { fn newline(&mut self) {
match self.buf.chars().rev().find(|ch| *ch != ' ') { match self.buf.chars().rev().find_position(|ch| *ch != ' ') {
Some('\n') | None => {} Some((_, '\n')) | None => {}
_ => writeln!(self).unwrap(), Some((idx, _)) => {
if idx != 0 {
self.buf.drain(self.buf.len() - idx..);
}
writeln!(self).unwrap()
}
} }
} }
@ -154,6 +157,19 @@ impl Printer<'_> {
match expr { match expr {
Expr::Missing => w!(self, "<EFBFBD>"), Expr::Missing => w!(self, "<EFBFBD>"),
Expr::Underscore => w!(self, "_"), Expr::Underscore => w!(self, "_"),
Expr::InlineAsm(_) => w!(self, "builtin#asm(_)"),
Expr::OffsetOf(offset_of) => {
w!(self, "builtin#offset_of(");
self.print_type_ref(&offset_of.container);
w!(
self,
", {})",
offset_of
.fields
.iter()
.format_with(".", |field, f| f(&field.display(self.db.upcast())))
);
}
Expr::Path(path) => self.print_path(path), Expr::Path(path) => self.print_path(path),
Expr::If { condition, then_branch, else_branch } => { Expr::If { condition, then_branch, else_branch } => {
w!(self, "if "); w!(self, "if ");
@ -173,7 +189,7 @@ impl Printer<'_> {
} }
Expr::Loop { body, label } => { Expr::Loop { body, label } => {
if let Some(lbl) = label { if let Some(lbl) = label {
w!(self, "{}: ", self.body[*lbl].name.display(self.db)); w!(self, "{}: ", self.body[*lbl].name.display(self.db.upcast()));
} }
w!(self, "loop "); w!(self, "loop ");
self.print_expr(*body); self.print_expr(*body);
@ -193,7 +209,7 @@ impl Printer<'_> {
} }
Expr::MethodCall { receiver, method_name, args, generic_args } => { Expr::MethodCall { receiver, method_name, args, generic_args } => {
self.print_expr(*receiver); self.print_expr(*receiver);
w!(self, ".{}", method_name.display(self.db)); w!(self, ".{}", method_name.display(self.db.upcast()));
if let Some(args) = generic_args { if let Some(args) = generic_args {
w!(self, "::<"); w!(self, "::<");
print_generic_args(self.db, args, self).unwrap(); print_generic_args(self.db, args, self).unwrap();
@ -231,13 +247,13 @@ impl Printer<'_> {
Expr::Continue { label } => { Expr::Continue { label } => {
w!(self, "continue"); w!(self, "continue");
if let Some(lbl) = label { if let Some(lbl) = label {
w!(self, " {}", self.body[*lbl].name.display(self.db)); w!(self, " {}", self.body[*lbl].name.display(self.db.upcast()));
} }
} }
Expr::Break { expr, label } => { Expr::Break { expr, label } => {
w!(self, "break"); w!(self, "break");
if let Some(lbl) = label { if let Some(lbl) = label {
w!(self, " {}", self.body[*lbl].name.display(self.db)); w!(self, " {}", self.body[*lbl].name.display(self.db.upcast()));
} }
if let Some(expr) = expr { if let Some(expr) = expr {
self.whitespace(); self.whitespace();
@ -276,7 +292,7 @@ impl Printer<'_> {
w!(self, "{{"); w!(self, "{{");
self.indented(|p| { self.indented(|p| {
for field in &**fields { for field in &**fields {
w!(p, "{}: ", field.name.display(self.db)); w!(p, "{}: ", field.name.display(self.db.upcast()));
p.print_expr(field.expr); p.print_expr(field.expr);
wln!(p, ","); wln!(p, ",");
} }
@ -293,7 +309,7 @@ impl Printer<'_> {
} }
Expr::Field { expr, name } => { Expr::Field { expr, name } => {
self.print_expr(*expr); self.print_expr(*expr);
w!(self, ".{}", name.display(self.db)); w!(self, ".{}", name.display(self.db.upcast()));
} }
Expr::Await { expr } => { Expr::Await { expr } => {
self.print_expr(*expr); self.print_expr(*expr);
@ -431,7 +447,8 @@ impl Printer<'_> {
} }
Expr::Literal(lit) => self.print_literal(lit), Expr::Literal(lit) => self.print_literal(lit),
Expr::Block { id: _, statements, tail, label } => { Expr::Block { id: _, statements, tail, label } => {
let label = label.map(|lbl| format!("{}: ", self.body[lbl].name.display(self.db))); let label =
label.map(|lbl| format!("{}: ", self.body[lbl].name.display(self.db.upcast())));
self.print_block(label.as_deref(), statements, tail); self.print_block(label.as_deref(), statements, tail);
} }
Expr::Unsafe { id: _, statements, tail } => { Expr::Unsafe { id: _, statements, tail } => {
@ -507,7 +524,7 @@ impl Printer<'_> {
w!(self, " {{"); w!(self, " {{");
self.indented(|p| { self.indented(|p| {
for arg in args.iter() { for arg in args.iter() {
w!(p, "{}: ", arg.name.display(self.db)); w!(p, "{}: ", arg.name.display(self.db.upcast()));
p.print_pat(arg.pat); p.print_pat(arg.pat);
wln!(p, ","); wln!(p, ",");
} }
@ -666,6 +683,6 @@ impl Printer<'_> {
BindingAnnotation::Ref => "ref ", BindingAnnotation::Ref => "ref ",
BindingAnnotation::RefMut => "ref mut ", BindingAnnotation::RefMut => "ref mut ",
}; };
w!(self, "{}{}", mode, name.display(self.db)); w!(self, "{}{}", mode, name.display(self.db.upcast()));
} }
} }

View file

@ -1,13 +1,13 @@
mod block; mod block;
use base_db::{fixture::WithFixture, SourceDatabase}; use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::Expect; use expect_test::{expect, Expect};
use crate::{test_db::TestDB, ModuleDefId}; use crate::{test_db::TestDB, ModuleDefId};
use super::*; use super::*;
fn lower(ra_fixture: &str) -> Arc<Body> { fn lower(ra_fixture: &str) -> (TestDB, Arc<Body>, DefWithBodyId) {
let db = TestDB::with_files(ra_fixture); let db = TestDB::with_files(ra_fixture);
let krate = db.crate_graph().iter().next().unwrap(); let krate = db.crate_graph().iter().next().unwrap();
@ -21,8 +21,10 @@ fn lower(ra_fixture: &str) -> Arc<Body> {
} }
} }
} }
let fn_def = fn_def.unwrap().into();
db.body(fn_def.unwrap().into()) let body = db.body(fn_def);
(db, body, fn_def)
} }
fn def_map_at(ra_fixture: &str) -> String { fn def_map_at(ra_fixture: &str) -> String {
@ -138,3 +140,84 @@ mod m {
"#, "#,
); );
} }
#[test]
fn desugar_builtin_format_args() {
// Regression test for a path resolution bug introduced with inner item handling.
let (db, body, def) = lower(
r#"
//- minicore: fmt
fn main() {
let are = "are";
let count = 10;
builtin#format_args("hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
}
"#,
);
expect![[r#"
fn main() {
let are = "are";
let count = 10;
builtin#lang(Arguments::new_v1_formatted)(
&[
"\"hello ", " ", " friends, we ", " ", "", "\"",
],
&[
builtin#lang(Argument::new_display)(
&count,
), builtin#lang(Argument::new_display)(
&"fancy",
), builtin#lang(Argument::new_debug)(
&are,
), builtin#lang(Argument::new_display)(
&"!",
),
],
&[
builtin#lang(Placeholder::new)(
0usize,
' ',
builtin#lang(Alignment::Unknown),
8u32,
builtin#lang(Count::Implied),
builtin#lang(Count::Is)(
2usize,
),
), builtin#lang(Placeholder::new)(
1usize,
' ',
builtin#lang(Alignment::Unknown),
0u32,
builtin#lang(Count::Implied),
builtin#lang(Count::Implied),
), builtin#lang(Placeholder::new)(
2usize,
' ',
builtin#lang(Alignment::Unknown),
0u32,
builtin#lang(Count::Implied),
builtin#lang(Count::Implied),
), builtin#lang(Placeholder::new)(
1usize,
' ',
builtin#lang(Alignment::Unknown),
0u32,
builtin#lang(Count::Implied),
builtin#lang(Count::Implied),
), builtin#lang(Placeholder::new)(
3usize,
' ',
builtin#lang(Alignment::Unknown),
0u32,
builtin#lang(Count::Implied),
builtin#lang(Count::Implied),
),
],
unsafe {
builtin#lang(UnsafeArg::new)()
},
);
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}

View file

@ -447,6 +447,7 @@ impl VariantData {
} }
} }
// FIXME: Linear lookup
pub fn field(&self, name: &Name) -> Option<LocalFieldId> { pub fn field(&self, name: &Name) -> Option<LocalFieldId> {
self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None }) self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None })
} }

View file

@ -37,6 +37,20 @@ pub fn find_path_prefixed(
find_path_inner(db, item, from, Some(prefix_kind), prefer_no_std) find_path_inner(db, item, from, Some(prefix_kind), prefer_no_std)
} }
#[derive(Copy, Clone, Debug)]
enum Stability {
Unstable,
Stable,
}
use Stability::*;
fn zip_stability(a: Stability, b: Stability) -> Stability {
match (a, b) {
(Stable, Stable) => Stable,
_ => Unstable,
}
}
const MAX_PATH_LEN: usize = 15; const MAX_PATH_LEN: usize = 15;
#[derive(Copy, Clone, Debug, PartialEq, Eq)] #[derive(Copy, Clone, Debug, PartialEq, Eq)]
@ -95,7 +109,8 @@ fn find_path_inner(
MAX_PATH_LEN, MAX_PATH_LEN,
prefixed, prefixed,
prefer_no_std || db.crate_supports_no_std(crate_root.krate), prefer_no_std || db.crate_supports_no_std(crate_root.krate),
); )
.map(|(item, _)| item);
} }
// - if the item is already in scope, return the name under which it is // - if the item is already in scope, return the name under which it is
@ -143,6 +158,7 @@ fn find_path_inner(
prefer_no_std || db.crate_supports_no_std(crate_root.krate), prefer_no_std || db.crate_supports_no_std(crate_root.krate),
scope_name, scope_name,
) )
.map(|(item, _)| item)
} }
fn find_path_for_module( fn find_path_for_module(
@ -155,7 +171,7 @@ fn find_path_for_module(
max_len: usize, max_len: usize,
prefixed: Option<PrefixKind>, prefixed: Option<PrefixKind>,
prefer_no_std: bool, prefer_no_std: bool,
) -> Option<ModPath> { ) -> Option<(ModPath, Stability)> {
if max_len == 0 { if max_len == 0 {
return None; return None;
} }
@ -165,19 +181,19 @@ fn find_path_for_module(
let scope_name = find_in_scope(db, def_map, from, ItemInNs::Types(module_id.into())); let scope_name = find_in_scope(db, def_map, from, ItemInNs::Types(module_id.into()));
if prefixed.is_none() { if prefixed.is_none() {
if let Some(scope_name) = scope_name { if let Some(scope_name) = scope_name {
return Some(ModPath::from_segments(PathKind::Plain, Some(scope_name))); return Some((ModPath::from_segments(PathKind::Plain, Some(scope_name)), Stable));
} }
} }
// - if the item is the crate root, return `crate` // - if the item is the crate root, return `crate`
if module_id == crate_root { if module_id == crate_root {
return Some(ModPath::from_segments(PathKind::Crate, None)); return Some((ModPath::from_segments(PathKind::Crate, None), Stable));
} }
// - if relative paths are fine, check if we are searching for a parent // - if relative paths are fine, check if we are searching for a parent
if prefixed.filter(PrefixKind::is_absolute).is_none() { if prefixed.filter(PrefixKind::is_absolute).is_none() {
if let modpath @ Some(_) = find_self_super(def_map, module_id, from) { if let modpath @ Some(_) = find_self_super(def_map, module_id, from) {
return modpath; return modpath.zip(Some(Stable));
} }
} }
@ -201,14 +217,14 @@ fn find_path_for_module(
} else { } else {
PathKind::Plain PathKind::Plain
}; };
return Some(ModPath::from_segments(kind, Some(name))); return Some((ModPath::from_segments(kind, Some(name)), Stable));
} }
} }
if let value @ Some(_) = if let value @ Some(_) =
find_in_prelude(db, &root_def_map, &def_map, ItemInNs::Types(module_id.into()), from) find_in_prelude(db, &root_def_map, &def_map, ItemInNs::Types(module_id.into()), from)
{ {
return value; return value.zip(Some(Stable));
} }
calculate_best_path( calculate_best_path(
db, db,
@ -301,11 +317,19 @@ fn calculate_best_path(
mut prefixed: Option<PrefixKind>, mut prefixed: Option<PrefixKind>,
prefer_no_std: bool, prefer_no_std: bool,
scope_name: Option<Name>, scope_name: Option<Name>,
) -> Option<ModPath> { ) -> Option<(ModPath, Stability)> {
if max_len <= 1 { if max_len <= 1 {
return None; return None;
} }
let mut best_path = None; let mut best_path = None;
let update_best_path =
|best_path: &mut Option<_>, new_path: (ModPath, Stability)| match best_path {
Some((old_path, old_stability)) => {
*old_path = new_path.0;
*old_stability = zip_stability(*old_stability, new_path.1);
}
None => *best_path = Some(new_path),
};
// Recursive case: // Recursive case:
// - otherwise, look for modules containing (reexporting) it and import it from one of those // - otherwise, look for modules containing (reexporting) it and import it from one of those
if item.krate(db) == Some(from.krate) { if item.krate(db) == Some(from.krate) {
@ -328,14 +352,14 @@ fn calculate_best_path(
prefixed, prefixed,
prefer_no_std, prefer_no_std,
) { ) {
path.push_segment(name); path.0.push_segment(name);
let new_path = match best_path { let new_path = match best_path.take() {
Some(best_path) => select_best_path(best_path, path, prefer_no_std), Some(best_path) => select_best_path(best_path, path, prefer_no_std),
None => path, None => path,
}; };
best_path_len = new_path.len(); best_path_len = new_path.0.len();
best_path = Some(new_path); update_best_path(&mut best_path, new_path);
} }
} }
} else { } else {
@ -354,7 +378,7 @@ fn calculate_best_path(
// Determine best path for containing module and append last segment from `info`. // Determine best path for containing module and append last segment from `info`.
// FIXME: we should guide this to look up the path locally, or from the same crate again? // FIXME: we should guide this to look up the path locally, or from the same crate again?
let mut path = find_path_for_module( let (mut path, path_stability) = find_path_for_module(
db, db,
def_map, def_map,
visited_modules, visited_modules,
@ -367,16 +391,19 @@ fn calculate_best_path(
)?; )?;
cov_mark::hit!(partially_imported); cov_mark::hit!(partially_imported);
path.push_segment(info.name.clone()); path.push_segment(info.name.clone());
Some(path) Some((
path,
zip_stability(path_stability, if info.is_unstable { Unstable } else { Stable }),
))
}) })
}); });
for path in extern_paths { for path in extern_paths {
let new_path = match best_path { let new_path = match best_path.take() {
Some(best_path) => select_best_path(best_path, path, prefer_no_std), Some(best_path) => select_best_path(best_path, path, prefer_no_std),
None => path, None => path,
}; };
best_path = Some(new_path); update_best_path(&mut best_path, new_path);
} }
} }
if let Some(module) = item.module(db) { if let Some(module) = item.module(db) {
@ -387,15 +414,24 @@ fn calculate_best_path(
} }
match prefixed.map(PrefixKind::prefix) { match prefixed.map(PrefixKind::prefix) {
Some(prefix) => best_path.or_else(|| { Some(prefix) => best_path.or_else(|| {
scope_name.map(|scope_name| ModPath::from_segments(prefix, Some(scope_name))) scope_name.map(|scope_name| (ModPath::from_segments(prefix, Some(scope_name)), Stable))
}), }),
None => best_path, None => best_path,
} }
} }
fn select_best_path(old_path: ModPath, new_path: ModPath, prefer_no_std: bool) -> ModPath { fn select_best_path(
old_path: (ModPath, Stability),
new_path: (ModPath, Stability),
prefer_no_std: bool,
) -> (ModPath, Stability) {
match (old_path.1, new_path.1) {
(Stable, Unstable) => return old_path,
(Unstable, Stable) => return new_path,
_ => {}
}
const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc]; const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc];
match (old_path.segments().first(), new_path.segments().first()) { match (old_path.0.segments().first(), new_path.0.segments().first()) {
(Some(old), Some(new)) if STD_CRATES.contains(old) && STD_CRATES.contains(new) => { (Some(old), Some(new)) if STD_CRATES.contains(old) && STD_CRATES.contains(new) => {
let rank = match prefer_no_std { let rank = match prefer_no_std {
false => |name: &Name| match name { false => |name: &Name| match name {
@ -416,7 +452,7 @@ fn select_best_path(old_path: ModPath, new_path: ModPath, prefer_no_std: bool) -
match nrank.cmp(&orank) { match nrank.cmp(&orank) {
Ordering::Less => old_path, Ordering::Less => old_path,
Ordering::Equal => { Ordering::Equal => {
if new_path.len() < old_path.len() { if new_path.0.len() < old_path.0.len() {
new_path new_path
} else { } else {
old_path old_path
@ -426,7 +462,7 @@ fn select_best_path(old_path: ModPath, new_path: ModPath, prefer_no_std: bool) -
} }
} }
_ => { _ => {
if new_path.len() < old_path.len() { if new_path.0.len() < old_path.0.len() {
new_path new_path
} else { } else {
old_path old_path
@ -1360,4 +1396,29 @@ pub mod ops {
"std::ops::Deref", "std::ops::Deref",
); );
} }
#[test]
fn respect_unstable_modules() {
check_found_path(
r#"
//- /main.rs crate:main deps:std,core
#![no_std]
extern crate std;
$0
//- /longer.rs crate:std deps:core
pub mod error {
pub use core::error::Error;
}
//- /core.rs crate:core
pub mod error {
#![unstable(feature = "error_in_core", issue = "103765")]
pub trait Error {}
}
"#,
"std::error::Error",
"std::error::Error",
"std::error::Error",
"std::error::Error",
);
}
} }

View file

@ -13,6 +13,7 @@
//! See also a neighboring `body` module. //! See also a neighboring `body` module.
pub mod type_ref; pub mod type_ref;
pub mod format_args;
use std::fmt; use std::fmt;
@ -117,7 +118,6 @@ impl From<ast::LiteralKind> for Literal {
fn from(ast_lit_kind: ast::LiteralKind) -> Self { fn from(ast_lit_kind: ast::LiteralKind) -> Self {
use ast::LiteralKind; use ast::LiteralKind;
match ast_lit_kind { match ast_lit_kind {
// FIXME: these should have actual values filled in, but unsure on perf impact
LiteralKind::IntNumber(lit) => { LiteralKind::IntNumber(lit) => {
if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) { if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
Literal::Float( Literal::Float(
@ -281,6 +281,19 @@ pub enum Expr {
Array(Array), Array(Array),
Literal(Literal), Literal(Literal),
Underscore, Underscore,
OffsetOf(OffsetOf),
InlineAsm(InlineAsm),
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OffsetOf {
pub container: Interned<TypeRef>,
pub fields: Box<[Name]>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlineAsm {
pub e: ExprId,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@ -341,7 +354,8 @@ impl Expr {
pub fn walk_child_exprs(&self, mut f: impl FnMut(ExprId)) { pub fn walk_child_exprs(&self, mut f: impl FnMut(ExprId)) {
match self { match self {
Expr::Missing => {} Expr::Missing => {}
Expr::Path(_) => {} Expr::Path(_) | Expr::OffsetOf(_) => {}
Expr::InlineAsm(it) => f(it.e),
Expr::If { condition, then_branch, else_branch } => { Expr::If { condition, then_branch, else_branch } => {
f(*condition); f(*condition);
f(*then_branch); f(*then_branch);

View file

@ -0,0 +1,502 @@
//! Parses `format_args` input.
use std::mem;
use hir_expand::name::Name;
use rustc_parse_format as parse;
use syntax::{
ast::{self, IsString},
AstToken, SmolStr, TextRange,
};
use crate::hir::ExprId;
/// Lowered representation of a `format_args!`-style template: literal text
/// pieces interleaved with placeholders, plus the collected arguments.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FormatArgs {
    /// The template, in order: literal chunks and `{…}` placeholders.
    pub template: Box<[FormatArgsPiece]>,
    /// All arguments (explicit and captured) referenced by the template.
    pub arguments: FormatArguments,
}
/// The finished, frozen set of arguments of a format template
/// (built via [`FormatArgumentsCollector`]).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FormatArguments {
    /// All arguments; explicit ones come first, captured ones last.
    pub arguments: Box<[FormatArgument]>,
    /// Number of unnamed (positional) arguments before the first named one.
    pub num_unnamed_args: usize,
    /// Number of explicit (non-captured) arguments.
    pub num_explicit_args: usize,
    /// Maps argument names to their index into `arguments`.
    pub names: Box<[(Name, usize)]>,
}
/// One piece of a format template: either literal text or a `{…}` placeholder.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FormatArgsPiece {
    /// A run of literal text between placeholders.
    Literal(Box<str>),
    /// A `{…}` placeholder.
    Placeholder(FormatPlaceholder),
}
/// A single `{…}` placeholder, fully resolved.
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
pub struct FormatPlaceholder {
    /// Index into [`FormatArgs::arguments`].
    pub argument: FormatArgPosition,
    /// The span inside the format string for the full `{…}` placeholder.
    pub span: Option<TextRange>,
    /// `{}`, `{:?}`, or `{:x}`, etc.
    pub format_trait: FormatTrait,
    /// `{}` or `{:.5}` or `{:-^20}`, etc.
    pub format_options: FormatOptions,
}
/// The resolved position of the argument a placeholder (or count) refers to.
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
pub struct FormatArgPosition {
    /// Which argument this position refers to (Ok),
    /// or would've referred to if it existed (Err).
    pub index: Result<usize, usize>,
    /// What kind of position this is. See [`FormatArgPositionKind`].
    pub kind: FormatArgPositionKind,
    /// The span of the name or number.
    pub span: Option<TextRange>,
}
/// How a placeholder referred to its argument.
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
pub enum FormatArgPositionKind {
    /// `{}` or `{:.*}`
    Implicit,
    /// `{1}` or `{:1$}` or `{:.1$}`
    Number,
    /// `{a}` or `{:a$}` or `{:.a$}`
    Named,
}
/// Which formatting trait a placeholder's type annotation selects.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum FormatTrait {
    /// `{}`
    Display,
    /// `{:?}`
    Debug,
    /// `{:e}`
    LowerExp,
    /// `{:E}`
    UpperExp,
    /// `{:o}`
    Octal,
    /// `{:p}`
    Pointer,
    /// `{:b}`
    Binary,
    /// `{:x}`
    LowerHex,
    /// `{:X}`
    UpperHex,
}
/// The formatting options inside a placeholder (fill, alignment, width, …).
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]
pub struct FormatOptions {
    /// The width. E.g. `{:5}` or `{:width$}`.
    pub width: Option<FormatCount>,
    /// The precision. E.g. `{:.5}` or `{:.precision$}`.
    pub precision: Option<FormatCount>,
    /// The alignment. E.g. `{:>}` or `{:<}` or `{:^}`.
    pub alignment: Option<FormatAlignment>,
    /// The fill character. E.g. the `.` in `{:.>10}`.
    pub fill: Option<char>,
    /// The `+` or `-` flag.
    pub sign: Option<FormatSign>,
    /// The `#` flag.
    pub alternate: bool,
    /// The `0` flag. E.g. the `0` in `{:02x}`.
    pub zero_pad: bool,
    /// The `x` or `X` flag (for `Debug` only). E.g. the `x` in `{:x?}`.
    pub debug_hex: Option<FormatDebugHex>,
}
/// An explicit sign flag in a placeholder.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FormatSign {
    /// The `+` flag.
    Plus,
    /// The `-` flag.
    Minus,
}
/// Hexadecimal modifier for `Debug` formatting.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FormatDebugHex {
    /// The `x` flag in `{:x?}`.
    Lower,
    /// The `X` flag in `{:X?}`.
    Upper,
}
/// Alignment of a placeholder's padded output.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FormatAlignment {
    /// `{:<}`
    Left,
    /// `{:>}`
    Right,
    /// `{:^}`
    Center,
}
/// A width or precision count: either a literal number or an argument reference.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FormatCount {
    /// `{:5}` or `{:.5}`
    Literal(usize),
    /// `{:.*}`, `{:.5$}`, or `{:a$}`, etc.
    Argument(FormatArgPosition),
}
/// One argument of a format template: how it was supplied plus its expression.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FormatArgument {
    /// How this argument was supplied (positional, named, or captured).
    pub kind: FormatArgumentKind,
    /// The argument's expression.
    pub expr: ExprId,
}
/// How a format argument was supplied at the call site.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum FormatArgumentKind {
    /// `format_args(…, arg)`
    Normal,
    /// `format_args(…, arg = 1)`
    Named(Name),
    /// `format_args("… {arg} …")`
    Captured(Name),
}
// Records how a referred-to argument was used, for bookkeeping while parsing
// (placeholder bodies vs. `$`-style precision/width counts).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum PositionUsedAs {
    /// Used in a `{…}` placeholder body (with the placeholder's span, if any).
    Placeholder(Option<TextRange>),
    /// Used as a precision count, e.g. `{:.a$}`.
    Precision,
    /// Used as a width count, e.g. `{:a$}`.
    Width,
}
use PositionUsedAs::*;
/// Parses a `format_args!` template string into a [`FormatArgs`].
///
/// * `s` — the format string literal token.
/// * `fmt_snippet` — source snippet of the literal, if available; forwarded to
///   the parser (presumably for better span recovery — TODO confirm).
/// * `args` — the explicitly passed arguments, already collected.
/// * `is_direct_literal` — whether the template is a literal written directly
///   at the macro call site; implicit name capture is only intended for that
///   case (see RFC #2795 note below).
/// * `synth` — synthesizes an expression for a name captured from the template.
pub(crate) fn parse(
    s: &ast::String,
    fmt_snippet: Option<String>,
    mut args: FormatArgumentsCollector,
    is_direct_literal: bool,
    mut synth: impl FnMut(Name) -> ExprId,
) -> FormatArgs {
    let text = s.text();
    // Raw-string style: `Some(n)` where n is the number of `#`s (opening quote
    // length minus the `"` itself), `None` for an ordinary string.
    let str_style = match s.quote_offsets() {
        Some(offsets) => {
            let raw = u32::from(offsets.quotes.0.len()) - 1;
            (raw != 0).then_some(raw as usize)
        }
        None => None,
    };
    let mut parser =
        parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format);
    // Collect pieces until the first parse error; errors abort lowering below.
    let mut pieces = Vec::new();
    while let Some(piece) = parser.next() {
        if !parser.errors.is_empty() {
            break;
        } else {
            pieces.push(piece);
        }
    }
    let is_source_literal = parser.is_source_literal;
    if !parser.errors.is_empty() {
        // FIXME: Diagnose
        // On parse errors, return an empty template but keep the arguments.
        return FormatArgs { template: Default::default(), arguments: args.finish() };
    }

    // Spans are only meaningful when the template comes straight from source.
    let to_span = |inner_span: parse::InnerSpan| {
        is_source_literal.then(|| {
            TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap())
        })
    };

    // Per-explicit-argument "was it referenced" flags, for unused-arg detection.
    let mut used = vec![false; args.explicit_args().len()];
    let mut invalid_refs = Vec::new();
    // NOTE(review): "refences" is a typo for "references" (local name only).
    let mut numeric_refences_to_named_arg = Vec::new();

    enum ArgRef<'a> {
        Index(usize),
        Name(&'a str, Option<TextRange>),
    }
    // Resolves an argument reference (by index or by name) to its final index,
    // marking usage, recording invalid references, and implicitly capturing
    // unknown names via `synth`.
    let mut lookup_arg = |arg: ArgRef<'_>,
                          span: Option<TextRange>,
                          used_as: PositionUsedAs,
                          kind: FormatArgPositionKind|
     -> FormatArgPosition {
        let index = match arg {
            ArgRef::Index(index) => {
                if let Some(arg) = args.by_index(index) {
                    used[index] = true;
                    if arg.kind.ident().is_some() {
                        // This was a named argument, but it was used as a positional argument.
                        numeric_refences_to_named_arg.push((index, span, used_as));
                    }
                    Ok(index)
                } else {
                    // Doesn't exist as an explicit argument.
                    invalid_refs.push((index, span, used_as, kind));
                    Err(index)
                }
            }
            ArgRef::Name(name, _span) => {
                let name = Name::new_text_dont_use(SmolStr::new(name));
                if let Some((index, _)) = args.by_name(&name) {
                    // Name found in `args`, so we resolve it to its index.
                    if index < args.explicit_args().len() {
                        // Mark it as used, if it was an explicit argument.
                        used[index] = true;
                    }
                    Ok(index)
                } else {
                    // Name not found in `args`, so we add it as an implicitly captured argument.
                    if !is_direct_literal {
                        // For the moment capturing variables from format strings expanded from macros is
                        // disabled (see RFC #2795)
                        // FIXME: Diagnose
                    }
                    Ok(args.add(FormatArgument {
                        kind: FormatArgumentKind::Captured(name.clone()),
                        // FIXME: This is problematic, we might want to synthesize a dummy
                        // expression proper and/or desugar these.
                        expr: synth(name),
                    }))
                }
            }
        };
        FormatArgPosition { index, kind, span }
    };

    // Walk the parsed pieces, merging consecutive literal text and lowering
    // each placeholder into a `FormatPlaceholder`.
    let mut template = Vec::new();
    let mut unfinished_literal = String::new();
    let mut placeholder_index = 0;

    for piece in pieces {
        match piece {
            parse::Piece::String(s) => {
                unfinished_literal.push_str(s);
            }
            parse::Piece::NextArgument(arg) => {
                let parse::Argument { position, position_span, format } = *arg;
                // Flush any accumulated literal text before the placeholder.
                if !unfinished_literal.is_empty() {
                    template.push(FormatArgsPiece::Literal(
                        mem::take(&mut unfinished_literal).into_boxed_str(),
                    ));
                }

                let span = parser.arg_places.get(placeholder_index).and_then(|&s| to_span(s));
                placeholder_index += 1;

                let position_span = to_span(position_span);
                let argument = match position {
                    parse::ArgumentImplicitlyIs(i) => lookup_arg(
                        ArgRef::Index(i),
                        position_span,
                        Placeholder(span),
                        FormatArgPositionKind::Implicit,
                    ),
                    parse::ArgumentIs(i) => lookup_arg(
                        ArgRef::Index(i),
                        position_span,
                        Placeholder(span),
                        FormatArgPositionKind::Number,
                    ),
                    parse::ArgumentNamed(name) => lookup_arg(
                        ArgRef::Name(name, position_span),
                        position_span,
                        Placeholder(span),
                        FormatArgPositionKind::Named,
                    ),
                };
                let alignment = match format.align {
                    parse::AlignUnknown => None,
                    parse::AlignLeft => Some(FormatAlignment::Left),
                    parse::AlignRight => Some(FormatAlignment::Right),
                    parse::AlignCenter => Some(FormatAlignment::Center),
                };
                let format_trait = match format.ty {
                    "" => FormatTrait::Display,
                    "?" => FormatTrait::Debug,
                    "e" => FormatTrait::LowerExp,
                    "E" => FormatTrait::UpperExp,
                    "o" => FormatTrait::Octal,
                    "p" => FormatTrait::Pointer,
                    "b" => FormatTrait::Binary,
                    "x" => FormatTrait::LowerHex,
                    "X" => FormatTrait::UpperHex,
                    _ => {
                        // FIXME: Diagnose
                        FormatTrait::Display
                    }
                };
                let precision_span = format.precision_span.and_then(to_span);
                let precision = match format.precision {
                    parse::CountIs(n) => Some(FormatCount::Literal(n)),
                    parse::CountIsName(name, name_span) => Some(FormatCount::Argument(lookup_arg(
                        ArgRef::Name(name, to_span(name_span)),
                        precision_span,
                        Precision,
                        FormatArgPositionKind::Named,
                    ))),
                    parse::CountIsParam(i) => Some(FormatCount::Argument(lookup_arg(
                        ArgRef::Index(i),
                        precision_span,
                        Precision,
                        FormatArgPositionKind::Number,
                    ))),
                    parse::CountIsStar(i) => Some(FormatCount::Argument(lookup_arg(
                        ArgRef::Index(i),
                        precision_span,
                        Precision,
                        FormatArgPositionKind::Implicit,
                    ))),
                    parse::CountImplied => None,
                };
                let width_span = format.width_span.and_then(to_span);
                let width = match format.width {
                    parse::CountIs(n) => Some(FormatCount::Literal(n)),
                    parse::CountIsName(name, name_span) => Some(FormatCount::Argument(lookup_arg(
                        ArgRef::Name(name, to_span(name_span)),
                        width_span,
                        Width,
                        FormatArgPositionKind::Named,
                    ))),
                    parse::CountIsParam(i) => Some(FormatCount::Argument(lookup_arg(
                        ArgRef::Index(i),
                        width_span,
                        Width,
                        FormatArgPositionKind::Number,
                    ))),
                    // `{:.*}` only exists for precision, never width.
                    parse::CountIsStar(_) => unreachable!(),
                    parse::CountImplied => None,
                };
                template.push(FormatArgsPiece::Placeholder(FormatPlaceholder {
                    argument,
                    span,
                    format_trait,
                    format_options: FormatOptions {
                        fill: format.fill,
                        alignment,
                        sign: format.sign.map(|s| match s {
                            parse::Sign::Plus => FormatSign::Plus,
                            parse::Sign::Minus => FormatSign::Minus,
                        }),
                        alternate: format.alternate,
                        zero_pad: format.zero_pad,
                        debug_hex: format.debug_hex.map(|s| match s {
                            parse::DebugHex::Lower => FormatDebugHex::Lower,
                            parse::DebugHex::Upper => FormatDebugHex::Upper,
                        }),
                        precision,
                        width,
                    },
                }));
            }
        }
    }
    // Flush trailing literal text after the last placeholder.
    if !unfinished_literal.is_empty() {
        template.push(FormatArgsPiece::Literal(unfinished_literal.into_boxed_str()));
    }

    if !invalid_refs.is_empty() {
        // FIXME: Diagnose
    }

    // Explicit arguments that were never referenced by the template.
    let unused = used
        .iter()
        .enumerate()
        .filter(|&(_, used)| !used)
        .map(|(i, _)| {
            let named = matches!(args.explicit_args()[i].kind, FormatArgumentKind::Named(_));
            (args.explicit_args()[i].expr, named)
        })
        .collect::<Vec<_>>();
    if !unused.is_empty() {
        // FIXME: Diagnose
    }

    FormatArgs { template: template.into_boxed_slice(), arguments: args.finish() }
}
/// Builder for [`FormatArguments`]: collects arguments one at a time via `add`
/// and freezes them with `finish`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FormatArgumentsCollector {
    /// All arguments collected so far; explicit ones first, captured ones last.
    arguments: Vec<FormatArgument>,
    /// Positional arguments seen before the first named one.
    num_unnamed_args: usize,
    /// Arguments passed explicitly (not captured from the format string).
    num_explicit_args: usize,
    /// Name → index into `arguments`, for named and captured arguments.
    names: Vec<(Name, usize)>,
}
impl FormatArgumentsCollector {
    /// Consumes the collector and produces the frozen [`FormatArguments`].
    pub(crate) fn finish(self) -> FormatArguments {
        let Self { arguments, num_unnamed_args, num_explicit_args, names } = self;
        FormatArguments {
            arguments: arguments.into_boxed_slice(),
            num_unnamed_args,
            num_explicit_args,
            names: names.into_boxed_slice(),
        }
    }

    /// Creates an empty collector.
    pub fn new() -> Self {
        Self {
            arguments: Vec::new(),
            names: Vec::new(),
            num_unnamed_args: 0,
            num_explicit_args: 0,
        }
    }

    /// Appends `arg` and returns its index.
    ///
    /// Named and captured arguments are registered in the name table; unnamed
    /// ones are only counted while no named argument has been seen yet. Every
    /// explicit (non-captured) argument must be added before any captured one.
    pub fn add(&mut self, arg: FormatArgument) -> usize {
        let index = self.arguments.len();
        match arg.kind.ident() {
            Some(name) => self.names.push((name.clone(), index)),
            // Only count the unnamed args before the first named arg.
            // (Any later ones are errors.)
            None if self.names.is_empty() => self.num_unnamed_args += 1,
            None => {}
        }
        if !matches!(arg.kind, FormatArgumentKind::Captured(..)) {
            // This is an explicit argument, so everything added so far must
            // have been explicit as well.
            assert_eq!(
                self.num_explicit_args,
                self.arguments.len(),
                "captured arguments must be added last"
            );
            self.num_explicit_args += 1;
        }
        self.arguments.push(arg);
        index
    }

    /// Looks up an argument by name, returning its index and the argument.
    pub fn by_name(&self, name: &Name) -> Option<(usize, &FormatArgument)> {
        for &(ref n, i) in self.names.iter() {
            if n == name {
                return Some((i, &self.arguments[i]));
            }
        }
        None
    }

    /// Looks up an *explicit* argument by index; captured ones are unreachable here.
    pub fn by_index(&self, i: usize) -> Option<&FormatArgument> {
        if i < self.num_explicit_args {
            Some(&self.arguments[i])
        } else {
            None
        }
    }

    /// The positional arguments preceding the first named one.
    pub fn unnamed_args(&self) -> &[FormatArgument] {
        &self.arguments[..self.num_unnamed_args]
    }

    /// The explicitly named (`arg = …`) arguments.
    pub fn named_args(&self) -> &[FormatArgument] {
        &self.arguments[self.num_unnamed_args..self.num_explicit_args]
    }

    /// All explicitly passed arguments (unnamed and named, but not captured).
    pub fn explicit_args(&self) -> &[FormatArgument] {
        &self.arguments[..self.num_explicit_args]
    }

    /// Every argument, including ones captured from the format string.
    pub fn all_args(&self) -> &[FormatArgument] {
        &self.arguments[..]
    }

    /// Mutable access to every argument.
    pub fn all_args_mut(&mut self) -> &mut Vec<FormatArgument> {
        &mut self.arguments
    }
}
impl FormatArgumentKind {
    /// The argument's name, if it has one (`None` for plain positional arguments).
    pub fn ident(&self) -> Option<&Name> {
        match self {
            Self::Named(name) | Self::Captured(name) => Some(name),
            Self::Normal => None,
        }
    }
}

View file

@ -32,6 +32,8 @@ pub struct ImportInfo {
pub is_trait_assoc_item: bool, pub is_trait_assoc_item: bool,
/// Whether this item is annotated with `#[doc(hidden)]`. /// Whether this item is annotated with `#[doc(hidden)]`.
pub is_doc_hidden: bool, pub is_doc_hidden: bool,
/// Whether this item is annotated with `#[unstable(..)]`.
pub is_unstable: bool,
} }
/// A map from publicly exported items to its name. /// A map from publicly exported items to its name.
@ -113,7 +115,6 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
for (name, per_ns) in visible_items { for (name, per_ns) in visible_items {
for (item, import) in per_ns.iter_items() { for (item, import) in per_ns.iter_items() {
// FIXME: Not yet used, but will be once we handle doc(hidden) import sources
let attr_id = if let Some(import) = import { let attr_id = if let Some(import) = import {
match import { match import {
ImportOrExternCrate::ExternCrate(id) => Some(id.into()), ImportOrExternCrate::ExternCrate(id) => Some(id.into()),
@ -125,28 +126,59 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
ItemInNs::Macros(id) => Some(id.into()), ItemInNs::Macros(id) => Some(id.into()),
} }
}; };
let is_doc_hidden = let status @ (is_doc_hidden, is_unstable) =
attr_id.map_or(false, |attr_id| db.attrs(attr_id).has_doc_hidden()); attr_id.map_or((false, false), |attr_id| {
let attrs = db.attrs(attr_id);
(attrs.has_doc_hidden(), attrs.is_unstable())
});
let import_info = ImportInfo { let import_info = ImportInfo {
name: name.clone(), name: name.clone(),
container: module, container: module,
is_trait_assoc_item: false, is_trait_assoc_item: false,
is_doc_hidden, is_doc_hidden,
is_unstable,
}; };
match depth_map.entry(item) { match depth_map.entry(item) {
Entry::Vacant(entry) => _ = entry.insert((depth, is_doc_hidden)), Entry::Vacant(entry) => _ = entry.insert((depth, status)),
Entry::Occupied(mut entry) => { Entry::Occupied(mut entry) => {
let &(occ_depth, occ_is_doc_hidden) = entry.get(); let &(occ_depth, (occ_is_doc_hidden, occ_is_unstable)) = entry.get();
// Prefer the one that is not doc(hidden), (depth, occ_depth);
// Otherwise, if both have the same doc(hidden)-ness and the new path is shorter, prefer that one. let overwrite = match (
let overwrite_entry = occ_is_doc_hidden && !is_doc_hidden is_doc_hidden,
|| occ_is_doc_hidden == is_doc_hidden && depth < occ_depth; occ_is_doc_hidden,
if !overwrite_entry { is_unstable,
occ_is_unstable,
) {
// no change in hiddenness or unstableness
(true, true, true, true)
| (true, true, false, false)
| (false, false, true, true)
| (false, false, false, false) => depth < occ_depth,
// either less hidden or less unstable, accept
(true, true, false, true)
| (false, true, true, true)
| (false, true, false, true)
| (false, true, false, false)
| (false, false, false, true) => true,
// more hidden or unstable, discard
(true, true, true, false)
| (true, false, true, true)
| (true, false, true, false)
| (true, false, false, false)
| (false, false, true, false) => false,
// exchanges doc(hidden) for unstable (and vice-versa),
(true, false, false, true) | (false, true, true, false) => {
depth < occ_depth
}
};
if !overwrite {
continue; continue;
} }
entry.insert((depth, is_doc_hidden)); entry.insert((depth, status));
} }
} }
@ -171,7 +203,7 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
} }
} }
} }
map.shrink_to_fit();
map map
} }
@ -200,11 +232,13 @@ fn collect_trait_assoc_items(
ItemInNs::Values(module_def_id) ItemInNs::Values(module_def_id)
}; };
let attrs = &db.attrs(item.into());
let assoc_item_info = ImportInfo { let assoc_item_info = ImportInfo {
container: trait_import_info.container, container: trait_import_info.container,
name: assoc_item_name.clone(), name: assoc_item_name.clone(),
is_trait_assoc_item: true, is_trait_assoc_item: true,
is_doc_hidden: db.attrs(item.into()).has_doc_hidden(), is_doc_hidden: attrs.has_doc_hidden(),
is_unstable: attrs.is_unstable(),
}; };
map.insert(assoc_item, assoc_item_info); map.insert(assoc_item, assoc_item_info);
} }

View file

@ -177,7 +177,7 @@ impl ItemTree {
} }
pub fn pretty_print(&self, db: &dyn DefDatabase) -> String { pub fn pretty_print(&self, db: &dyn DefDatabase) -> String {
pretty::print_item_tree(db.upcast(), self) pretty::print_item_tree(db, self)
} }
fn data(&self) -> &ItemTreeData { fn data(&self) -> &ItemTreeData {

View file

@ -2,8 +2,6 @@
use std::fmt::{self, Write}; use std::fmt::{self, Write};
use hir_expand::db::ExpandDatabase;
use crate::{ use crate::{
generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget},
pretty::{print_path, print_type_bounds, print_type_ref}, pretty::{print_path, print_type_bounds, print_type_ref},
@ -12,7 +10,7 @@ use crate::{
use super::*; use super::*;
pub(super) fn print_item_tree(db: &dyn ExpandDatabase, tree: &ItemTree) -> String { pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree) -> String {
let mut p = Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true }; let mut p = Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true };
if let Some(attrs) = tree.attrs.get(&AttrOwner::TopLevel) { if let Some(attrs) = tree.attrs.get(&AttrOwner::TopLevel) {
@ -45,7 +43,7 @@ macro_rules! wln {
} }
struct Printer<'a> { struct Printer<'a> {
db: &'a dyn ExpandDatabase, db: &'a dyn DefDatabase,
tree: &'a ItemTree, tree: &'a ItemTree,
buf: String, buf: String,
indent_level: usize, indent_level: usize,
@ -91,7 +89,7 @@ impl Printer<'_> {
self, self,
"#{}[{}{}]{}", "#{}[{}{}]{}",
inner, inner,
attr.path.display(self.db), attr.path.display(self.db.upcast()),
attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(), attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(),
separated_by, separated_by,
); );
@ -106,7 +104,7 @@ impl Printer<'_> {
fn print_visibility(&mut self, vis: RawVisibilityId) { fn print_visibility(&mut self, vis: RawVisibilityId) {
match &self.tree[vis] { match &self.tree[vis] {
RawVisibility::Module(path) => w!(self, "pub({}) ", path.display(self.db)), RawVisibility::Module(path) => w!(self, "pub({}) ", path.display(self.db.upcast())),
RawVisibility::Public => w!(self, "pub "), RawVisibility::Public => w!(self, "pub "),
}; };
} }
@ -121,7 +119,7 @@ impl Printer<'_> {
let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field]; let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field];
this.print_attrs_of(field, "\n"); this.print_attrs_of(field, "\n");
this.print_visibility(*visibility); this.print_visibility(*visibility);
w!(this, "{}: ", name.display(self.db)); w!(this, "{}: ", name.display(self.db.upcast()));
this.print_type_ref(type_ref); this.print_type_ref(type_ref);
wln!(this, ","); wln!(this, ",");
} }
@ -135,7 +133,7 @@ impl Printer<'_> {
let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field]; let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field];
this.print_attrs_of(field, "\n"); this.print_attrs_of(field, "\n");
this.print_visibility(*visibility); this.print_visibility(*visibility);
w!(this, "{}: ", name.display(self.db)); w!(this, "{}: ", name.display(self.db.upcast()));
this.print_type_ref(type_ref); this.print_type_ref(type_ref);
wln!(this, ","); wln!(this, ",");
} }
@ -168,20 +166,20 @@ impl Printer<'_> {
fn print_use_tree(&mut self, use_tree: &UseTree) { fn print_use_tree(&mut self, use_tree: &UseTree) {
match &use_tree.kind { match &use_tree.kind {
UseTreeKind::Single { path, alias } => { UseTreeKind::Single { path, alias } => {
w!(self, "{}", path.display(self.db)); w!(self, "{}", path.display(self.db.upcast()));
if let Some(alias) = alias { if let Some(alias) = alias {
w!(self, " as {}", alias); w!(self, " as {}", alias);
} }
} }
UseTreeKind::Glob { path } => { UseTreeKind::Glob { path } => {
if let Some(path) = path { if let Some(path) = path {
w!(self, "{}::", path.display(self.db)); w!(self, "{}::", path.display(self.db.upcast()));
} }
w!(self, "*"); w!(self, "*");
} }
UseTreeKind::Prefixed { prefix, list } => { UseTreeKind::Prefixed { prefix, list } => {
if let Some(prefix) = prefix { if let Some(prefix) = prefix {
w!(self, "{}::", prefix.display(self.db)); w!(self, "{}::", prefix.display(self.db.upcast()));
} }
w!(self, "{{"); w!(self, "{{");
for (i, tree) in list.iter().enumerate() { for (i, tree) in list.iter().enumerate() {
@ -209,7 +207,7 @@ impl Printer<'_> {
ModItem::ExternCrate(it) => { ModItem::ExternCrate(it) => {
let ExternCrate { name, alias, visibility, ast_id: _ } = &self.tree[it]; let ExternCrate { name, alias, visibility, ast_id: _ } = &self.tree[it];
self.print_visibility(*visibility); self.print_visibility(*visibility);
w!(self, "extern crate {}", name.display(self.db)); w!(self, "extern crate {}", name.display(self.db.upcast()));
if let Some(alias) = alias { if let Some(alias) = alias {
w!(self, " as {}", alias); w!(self, " as {}", alias);
} }
@ -256,7 +254,7 @@ impl Printer<'_> {
if let Some(abi) = abi { if let Some(abi) = abi {
w!(self, "extern \"{}\" ", abi); w!(self, "extern \"{}\" ", abi);
} }
w!(self, "fn {}", name.display(self.db)); w!(self, "fn {}", name.display(self.db.upcast()));
self.print_generic_params(explicit_generic_params); self.print_generic_params(explicit_generic_params);
w!(self, "("); w!(self, "(");
if !params.is_empty() { if !params.is_empty() {
@ -290,7 +288,7 @@ impl Printer<'_> {
ModItem::Struct(it) => { ModItem::Struct(it) => {
let Struct { visibility, name, fields, generic_params, ast_id: _ } = &self.tree[it]; let Struct { visibility, name, fields, generic_params, ast_id: _ } = &self.tree[it];
self.print_visibility(*visibility); self.print_visibility(*visibility);
w!(self, "struct {}", name.display(self.db)); w!(self, "struct {}", name.display(self.db.upcast()));
self.print_generic_params(generic_params); self.print_generic_params(generic_params);
self.print_fields_and_where_clause(fields, generic_params); self.print_fields_and_where_clause(fields, generic_params);
if matches!(fields, Fields::Record(_)) { if matches!(fields, Fields::Record(_)) {
@ -302,7 +300,7 @@ impl Printer<'_> {
ModItem::Union(it) => { ModItem::Union(it) => {
let Union { name, visibility, fields, generic_params, ast_id: _ } = &self.tree[it]; let Union { name, visibility, fields, generic_params, ast_id: _ } = &self.tree[it];
self.print_visibility(*visibility); self.print_visibility(*visibility);
w!(self, "union {}", name.display(self.db)); w!(self, "union {}", name.display(self.db.upcast()));
self.print_generic_params(generic_params); self.print_generic_params(generic_params);
self.print_fields_and_where_clause(fields, generic_params); self.print_fields_and_where_clause(fields, generic_params);
if matches!(fields, Fields::Record(_)) { if matches!(fields, Fields::Record(_)) {
@ -314,14 +312,14 @@ impl Printer<'_> {
ModItem::Enum(it) => { ModItem::Enum(it) => {
let Enum { name, visibility, variants, generic_params, ast_id: _ } = &self.tree[it]; let Enum { name, visibility, variants, generic_params, ast_id: _ } = &self.tree[it];
self.print_visibility(*visibility); self.print_visibility(*visibility);
w!(self, "enum {}", name.display(self.db)); w!(self, "enum {}", name.display(self.db.upcast()));
self.print_generic_params(generic_params); self.print_generic_params(generic_params);
self.print_where_clause_and_opening_brace(generic_params); self.print_where_clause_and_opening_brace(generic_params);
self.indented(|this| { self.indented(|this| {
for variant in variants.clone() { for variant in variants.clone() {
let Variant { name, fields, ast_id: _ } = &this.tree[variant]; let Variant { name, fields, ast_id: _ } = &this.tree[variant];
this.print_attrs_of(variant, "\n"); this.print_attrs_of(variant, "\n");
w!(this, "{}", name.display(self.db)); w!(this, "{}", name.display(self.db.upcast()));
this.print_fields(fields); this.print_fields(fields);
wln!(this, ","); wln!(this, ",");
} }
@ -333,7 +331,7 @@ impl Printer<'_> {
self.print_visibility(*visibility); self.print_visibility(*visibility);
w!(self, "const "); w!(self, "const ");
match name { match name {
Some(name) => w!(self, "{}", name.display(self.db)), Some(name) => w!(self, "{}", name.display(self.db.upcast())),
None => w!(self, "_"), None => w!(self, "_"),
} }
w!(self, ": "); w!(self, ": ");
@ -347,7 +345,7 @@ impl Printer<'_> {
if *mutable { if *mutable {
w!(self, "mut "); w!(self, "mut ");
} }
w!(self, "{}: ", name.display(self.db)); w!(self, "{}: ", name.display(self.db.upcast()));
self.print_type_ref(type_ref); self.print_type_ref(type_ref);
w!(self, " = _;"); w!(self, " = _;");
wln!(self); wln!(self);
@ -369,7 +367,7 @@ impl Printer<'_> {
if *is_auto { if *is_auto {
w!(self, "auto "); w!(self, "auto ");
} }
w!(self, "trait {}", name.display(self.db)); w!(self, "trait {}", name.display(self.db.upcast()));
self.print_generic_params(generic_params); self.print_generic_params(generic_params);
self.print_where_clause_and_opening_brace(generic_params); self.print_where_clause_and_opening_brace(generic_params);
self.indented(|this| { self.indented(|this| {
@ -382,7 +380,7 @@ impl Printer<'_> {
ModItem::TraitAlias(it) => { ModItem::TraitAlias(it) => {
let TraitAlias { name, visibility, generic_params, ast_id: _ } = &self.tree[it]; let TraitAlias { name, visibility, generic_params, ast_id: _ } = &self.tree[it];
self.print_visibility(*visibility); self.print_visibility(*visibility);
w!(self, "trait {}", name.display(self.db)); w!(self, "trait {}", name.display(self.db.upcast()));
self.print_generic_params(generic_params); self.print_generic_params(generic_params);
w!(self, " = "); w!(self, " = ");
self.print_where_clause(generic_params); self.print_where_clause(generic_params);
@ -415,7 +413,7 @@ impl Printer<'_> {
let TypeAlias { name, visibility, bounds, type_ref, generic_params, ast_id: _ } = let TypeAlias { name, visibility, bounds, type_ref, generic_params, ast_id: _ } =
&self.tree[it]; &self.tree[it];
self.print_visibility(*visibility); self.print_visibility(*visibility);
w!(self, "type {}", name.display(self.db)); w!(self, "type {}", name.display(self.db.upcast()));
self.print_generic_params(generic_params); self.print_generic_params(generic_params);
if !bounds.is_empty() { if !bounds.is_empty() {
w!(self, ": "); w!(self, ": ");
@ -432,7 +430,7 @@ impl Printer<'_> {
ModItem::Mod(it) => { ModItem::Mod(it) => {
let Mod { name, visibility, kind, ast_id: _ } = &self.tree[it]; let Mod { name, visibility, kind, ast_id: _ } = &self.tree[it];
self.print_visibility(*visibility); self.print_visibility(*visibility);
w!(self, "mod {}", name.display(self.db)); w!(self, "mod {}", name.display(self.db.upcast()));
match kind { match kind {
ModKind::Inline { items } => { ModKind::Inline { items } => {
w!(self, " {{"); w!(self, " {{");
@ -450,16 +448,16 @@ impl Printer<'_> {
} }
ModItem::MacroCall(it) => { ModItem::MacroCall(it) => {
let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it]; let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it];
wln!(self, "{}!(...);", path.display(self.db)); wln!(self, "{}!(...);", path.display(self.db.upcast()));
} }
ModItem::MacroRules(it) => { ModItem::MacroRules(it) => {
let MacroRules { name, ast_id: _ } = &self.tree[it]; let MacroRules { name, ast_id: _ } = &self.tree[it];
wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db)); wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db.upcast()));
} }
ModItem::MacroDef(it) => { ModItem::MacroDef(it) => {
let MacroDef { name, visibility, ast_id: _ } = &self.tree[it]; let MacroDef { name, visibility, ast_id: _ } = &self.tree[it];
self.print_visibility(*visibility); self.print_visibility(*visibility);
wln!(self, "macro {} {{ ... }}", name.display(self.db)); wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast()));
} }
} }
@ -491,7 +489,7 @@ impl Printer<'_> {
} }
first = false; first = false;
self.print_attrs_of(idx, " "); self.print_attrs_of(idx, " ");
w!(self, "{}", lt.name.display(self.db)); w!(self, "{}", lt.name.display(self.db.upcast()));
} }
for (idx, x) in params.type_or_consts.iter() { for (idx, x) in params.type_or_consts.iter() {
if !first { if !first {
@ -501,11 +499,11 @@ impl Printer<'_> {
self.print_attrs_of(idx, " "); self.print_attrs_of(idx, " ");
match x { match x {
TypeOrConstParamData::TypeParamData(ty) => match &ty.name { TypeOrConstParamData::TypeParamData(ty) => match &ty.name {
Some(name) => w!(self, "{}", name.display(self.db)), Some(name) => w!(self, "{}", name.display(self.db.upcast())),
None => w!(self, "_anon_{}", idx.into_raw()), None => w!(self, "_anon_{}", idx.into_raw()),
}, },
TypeOrConstParamData::ConstParamData(konst) => { TypeOrConstParamData::ConstParamData(konst) => {
w!(self, "const {}: ", konst.name.display(self.db)); w!(self, "const {}: ", konst.name.display(self.db.upcast()));
self.print_type_ref(&konst.ty); self.print_type_ref(&konst.ty);
} }
} }
@ -540,8 +538,8 @@ impl Printer<'_> {
wln!( wln!(
this, this,
"{}: {},", "{}: {},",
target.name.display(self.db), target.name.display(self.db.upcast()),
bound.name.display(self.db) bound.name.display(self.db.upcast())
); );
continue; continue;
} }
@ -551,7 +549,7 @@ impl Printer<'_> {
if i != 0 { if i != 0 {
w!(this, ", "); w!(this, ", ");
} }
w!(this, "{}", lt.display(self.db)); w!(this, "{}", lt.display(self.db.upcast()));
} }
w!(this, "> "); w!(this, "> ");
(target, bound) (target, bound)
@ -562,7 +560,7 @@ impl Printer<'_> {
WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty), WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty),
WherePredicateTypeTarget::TypeOrConstParam(id) => { WherePredicateTypeTarget::TypeOrConstParam(id) => {
match &params.type_or_consts[*id].name() { match &params.type_or_consts[*id].name() {
Some(name) => w!(this, "{}", name.display(self.db)), Some(name) => w!(this, "{}", name.display(self.db.upcast())),
None => w!(this, "_anon_{}", id.into_raw()), None => w!(this, "_anon_{}", id.into_raw()),
} }
} }

View file

@ -2,6 +2,7 @@
//! //!
//! This attribute to tell the compiler about semi built-in std library //! This attribute to tell the compiler about semi built-in std library
//! features, such as Fn family of traits. //! features, such as Fn family of traits.
use hir_expand::name::Name;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use syntax::SmolStr; use syntax::SmolStr;
use triomphe::Arc; use triomphe::Arc;
@ -238,7 +239,17 @@ impl LangItem {
pub fn path(&self, db: &dyn DefDatabase, start_crate: CrateId) -> Option<Path> { pub fn path(&self, db: &dyn DefDatabase, start_crate: CrateId) -> Option<Path> {
let t = db.lang_item(start_crate, *self)?; let t = db.lang_item(start_crate, *self)?;
Some(Path::LangItem(t)) Some(Path::LangItem(t, None))
}
pub fn ty_rel_path(
&self,
db: &dyn DefDatabase,
start_crate: CrateId,
seg: Name,
) -> Option<Path> {
let t = db.lang_item(start_crate, *self)?;
Some(Path::LangItem(t, Some(seg)))
} }
} }

View file

@ -1,10 +1,11 @@
//! Context for lowering paths. //! Context for lowering paths.
use std::cell::OnceCell;
use hir_expand::{ use hir_expand::{
ast_id_map::{AstIdMap, AstIdNode}, ast_id_map::{AstIdMap, AstIdNode},
hygiene::Hygiene, hygiene::Hygiene,
AstId, HirFileId, InFile, AstId, HirFileId, InFile,
}; };
use once_cell::unsync::OnceCell;
use syntax::ast; use syntax::ast;
use triomphe::Arc; use triomphe::Arc;

View file

@ -22,6 +22,45 @@ fn main() { 0 as u32; }
); );
} }
#[test]
fn test_asm_expand() {
check(
r#"
#[rustc_builtin_macro]
macro_rules! asm {() => {}}
fn main() {
let i: u64 = 3;
let o: u64;
unsafe {
asm!(
"mov {0}, {1}",
"add {0}, 5",
out(reg) o,
in(reg) i,
);
}
}
"#,
expect![[r##"
#[rustc_builtin_macro]
macro_rules! asm {() => {}}
fn main() {
let i: u64 = 3;
let o: u64;
unsafe {
builtin #asm ( {
$crate::format_args!("mov {0}, {1}");
$crate::format_args!("add {0}, 5");
}
);
}
}
"##]],
);
}
#[test] #[test]
fn test_line_expand() { fn test_line_expand() {
check( check(
@ -201,7 +240,7 @@ macro_rules! format_args {
} }
fn main() { fn main() {
::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(arg1(a, b, c)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(arg2), ::core::fmt::Debug::fmt), ]); builtin #format_args ("{} {:?}", arg1(a, b, c), arg2);
} }
"##]], "##]],
); );
@ -219,10 +258,10 @@ macro_rules! format_args {
fn main() { fn main() {
format_args!(x = 2); format_args!(x = 2);
format_args!(x =); format_args!/*+errors*/(x =);
format_args!(x =, x = 2); format_args!/*+errors*/(x =, x = 2);
format_args!("{}", x =); format_args!/*+errors*/("{}", x =);
format_args!(=, "{}", x =); format_args!/*+errors*/(=, "{}", x =);
format_args!(x = 2, "{}", 5); format_args!(x = 2, "{}", 5);
} }
"#, "#,
@ -234,12 +273,19 @@ macro_rules! format_args {
} }
fn main() { fn main() {
/* error: no rule matches input tokens */; builtin #format_args (x = 2);
/* error: expected expression */; /* parse error: expected expression */
/* error: expected expression, expected COMMA */; builtin #format_args (x = );
/* error: expected expression */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]); /* parse error: expected expression */
/* error: expected expression, expected R_PAREN */; /* parse error: expected R_PAREN */
::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(5), ::core::fmt::Display::fmt), ]); /* parse error: expected expression, item or let statement */
builtin #format_args (x = , x = 2);
/* parse error: expected expression */
builtin #format_args ("{}", x = );
/* parse error: expected expression */
/* parse error: expected expression */
builtin #format_args ( = , "{}", x = );
builtin #format_args (x = 2, "{}", 5);
} }
"##]], "##]],
); );
@ -267,7 +313,7 @@ macro_rules! format_args {
} }
fn main() { fn main() {
::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a::<A, B>()), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(b), ::core::fmt::Debug::fmt), ]); builtin #format_args ("{} {:?}", a::<A, B>(), b);
} }
"##]], "##]],
); );
@ -300,7 +346,7 @@ macro_rules! format_args {
} }
fn main() { fn main() {
::core::fmt::Arguments::new_v1(&[r#""#, r#",mismatch,""#, r#"",""#, r#"""#, ], &[::core::fmt::ArgumentV1::new(&(location_csv_pat(db, &analysis, vfs, &sm, pat_id)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(mismatch.expected.display(db)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(mismatch.actual.display(db)), ::core::fmt::Display::fmt), ]); builtin #format_args (r#"{},mismatch,"{}","{}""#, location_csv_pat(db, &analysis, vfs, &sm, pat_id), mismatch.expected.display(db), mismatch.actual.display(db));
} }
"##]], "##]],
); );
@ -334,7 +380,7 @@ macro_rules! format_args {
} }
fn main() { fn main() {
::core::fmt::Arguments::new_v1(&["xxx", "y", "zzz", ], &[::core::fmt::ArgumentV1::new(&(2), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(b), ::core::fmt::Debug::fmt), ]); builtin #format_args (concat!("xxx{}y", "{:?}zzz"), 2, b);
} }
"##]], "##]],
); );
@ -364,8 +410,8 @@ macro_rules! format_args {
fn main() { fn main() {
let _ = let _ =
/* error: expected field name or number *//* parse error: expected field name or number */ /* parse error: expected field name or number */
::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(), ::core::fmt::Debug::fmt), ]); builtin #format_args ("{} {:?}", a.);
} }
"##]], "##]],
); );

View file

@ -117,7 +117,7 @@ fn main(foo: ()) {
macro_rules! format_args {} macro_rules! format_args {}
fn main(foo: ()) { fn main(foo: ()) {
/* error: unresolved macro identity */::core::fmt::Arguments::new_v1(&["", " ", " ", ], &[::core::fmt::ArgumentV1::new(&(::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(0), ::core::fmt::Display::fmt), ])), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(foo), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(identity!(10)), ::core::fmt::Display::fmt), ]) builtin #format_args ("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
} }
"##]], "##]],
); );
@ -150,8 +150,8 @@ macro_rules! identity {
} }
fn main(foo: ()) { fn main(foo: ()) {
// format_args/*+tokenids*/!("{} {} {}"#1,#3 format_args!("{}", 0#10),#12 foo#13,#14 identity!(10#18),#21 "bar"#22) // format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17)
::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 " "#4294967295,#4294967295 " "#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#42949672950#10)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#4294967295foo#13)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#429496729510#18)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295 builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0
} }
"##]], "##]],

View file

@ -929,8 +929,8 @@ fn main() {
macro_rules! format_args {} macro_rules! format_args {}
fn main() { fn main() {
/* error: expected field name or number *//* parse error: expected field name or number */ /* parse error: expected field name or number */
::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(line.1.), ::core::fmt::Display::fmt), ]); builtin #format_args ("{}", line.1.);
} }
"##]], "##]],
@ -956,19 +956,15 @@ fn main() {
macro_rules! format_args {} macro_rules! format_args {}
fn main() { fn main() {
/* error: expected COMMA, expected R_BRACK, expected COMMA, expected COMMA, expected expression, expected R_PAREN *//* parse error: expected COMMA */ /* parse error: expected COMMA */
/* parse error: expected R_BRACK */ /* parse error: expected R_BRACK */
/* parse error: expected COMMA */ /* parse error: expected COMMA */
/* parse error: expected COMMA */ /* parse error: expected COMMA */
/* parse error: expected expression */ /* parse error: expected expression */
/* parse error: expected R_PAREN */ /* parse error: expected R_PAREN */
/* parse error: expected R_PAREN */
/* parse error: expected expression, item or let statement */ /* parse error: expected expression, item or let statement */
/* parse error: expected expression, item or let statement */ /* parse error: expected expression, item or let statement */
/* parse error: expected expression, item or let statement */ builtin #format_args ("{}", &[0 2]);
/* parse error: expected expression, item or let statement */
/* parse error: expected expression, item or let statement */
::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(&[0 2]), ::core::fmt::Display::fmt), ]);
} }
"##]], "##]],

View file

@ -93,7 +93,7 @@ use crate::{
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub struct DefMap { pub struct DefMap {
_c: Count<Self>, _c: Count<Self>,
/// When this is a block def map, this will hold the block id of the the block and module that /// When this is a block def map, this will hold the block id of the block and module that
/// contains this block. /// contains this block.
block: Option<BlockInfo>, block: Option<BlockInfo>,
/// The modules and their data declared in this crate. /// The modules and their data declared in this crate.

View file

@ -1828,7 +1828,11 @@ impl ModCollector<'_, '_> {
let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else { let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else {
// `#[macro_use]` (without any paths) found, forget collected names and just import // `#[macro_use]` (without any paths) found, forget collected names and just import
// all visible macros. // all visible macros.
self.def_collector.import_macros_from_extern_crate(target_crate, None, Some(extern_crate_id)); self.def_collector.import_macros_from_extern_crate(
target_crate,
None,
Some(extern_crate_id),
);
return; return;
}; };
for path in paths { for path in paths {

View file

@ -47,7 +47,7 @@ pub enum Path {
}, },
/// A link to a lang item. It is used in desugaring of things like `it?`. We can show these /// A link to a lang item. It is used in desugaring of things like `it?`. We can show these
/// links via a normal path since they might be private and not accessible in the usage place. /// links via a normal path since they might be private and not accessible in the usage place.
LangItem(LangItemTarget), LangItem(LangItemTarget, Option<Name>),
} }
/// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This /// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
@ -122,33 +122,40 @@ impl Path {
pub fn kind(&self) -> &PathKind { pub fn kind(&self) -> &PathKind {
match self { match self {
Path::Normal { mod_path, .. } => &mod_path.kind, Path::Normal { mod_path, .. } => &mod_path.kind,
Path::LangItem(_) => &PathKind::Abs, Path::LangItem(..) => &PathKind::Abs,
} }
} }
pub fn type_anchor(&self) -> Option<&TypeRef> { pub fn type_anchor(&self) -> Option<&TypeRef> {
match self { match self {
Path::Normal { type_anchor, .. } => type_anchor.as_deref(), Path::Normal { type_anchor, .. } => type_anchor.as_deref(),
Path::LangItem(_) => None, Path::LangItem(..) => None,
} }
} }
pub fn segments(&self) -> PathSegments<'_> { pub fn segments(&self) -> PathSegments<'_> {
let Path::Normal { mod_path, generic_args, .. } = self else { match self {
return PathSegments { segments: &[], generic_args: None }; Path::Normal { mod_path, generic_args, .. } => {
let s = PathSegments {
segments: mod_path.segments(),
generic_args: generic_args.as_deref(),
}; };
let s =
PathSegments { segments: mod_path.segments(), generic_args: generic_args.as_deref() };
if let Some(generic_args) = s.generic_args { if let Some(generic_args) = s.generic_args {
assert_eq!(s.segments.len(), generic_args.len()); assert_eq!(s.segments.len(), generic_args.len());
} }
s s
} }
Path::LangItem(_, seg) => PathSegments {
segments: seg.as_ref().map_or(&[], |seg| std::slice::from_ref(seg)),
generic_args: None,
},
}
}
pub fn mod_path(&self) -> Option<&ModPath> { pub fn mod_path(&self) -> Option<&ModPath> {
match self { match self {
Path::Normal { mod_path, .. } => Some(&mod_path), Path::Normal { mod_path, .. } => Some(&mod_path),
Path::LangItem(_) => None, Path::LangItem(..) => None,
} }
} }

View file

@ -2,18 +2,54 @@
use std::fmt::{self, Write}; use std::fmt::{self, Write};
use hir_expand::{db::ExpandDatabase, mod_path::PathKind}; use hir_expand::mod_path::PathKind;
use intern::Interned; use intern::Interned;
use itertools::Itertools; use itertools::Itertools;
use crate::{ use crate::{
db::DefDatabase,
lang_item::LangItemTarget,
path::{GenericArg, GenericArgs, Path}, path::{GenericArg, GenericArgs, Path},
type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef}, type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef},
}; };
pub(crate) fn print_path(db: &dyn ExpandDatabase, path: &Path, buf: &mut dyn Write) -> fmt::Result { pub(crate) fn print_path(db: &dyn DefDatabase, path: &Path, buf: &mut dyn Write) -> fmt::Result {
if let Path::LangItem(it) = path { if let Path::LangItem(it, s) = path {
return write!(buf, "$lang_item::{it:?}"); write!(buf, "builtin#lang(")?;
match *it {
LangItemTarget::ImplDef(it) => write!(buf, "{it:?}")?,
LangItemTarget::EnumId(it) => {
write!(buf, "{}", db.enum_data(it).name.display(db.upcast()))?
}
LangItemTarget::Function(it) => {
write!(buf, "{}", db.function_data(it).name.display(db.upcast()))?
}
LangItemTarget::Static(it) => {
write!(buf, "{}", db.static_data(it).name.display(db.upcast()))?
}
LangItemTarget::Struct(it) => {
write!(buf, "{}", db.struct_data(it).name.display(db.upcast()))?
}
LangItemTarget::Union(it) => {
write!(buf, "{}", db.union_data(it).name.display(db.upcast()))?
}
LangItemTarget::TypeAlias(it) => {
write!(buf, "{}", db.type_alias_data(it).name.display(db.upcast()))?
}
LangItemTarget::Trait(it) => {
write!(buf, "{}", db.trait_data(it).name.display(db.upcast()))?
}
LangItemTarget::EnumVariant(it) => write!(
buf,
"{}",
db.enum_data(it.parent).variants[it.local_id].name.display(db.upcast())
)?,
}
if let Some(s) = s {
write!(buf, "::{}", s.display(db.upcast()))?;
}
return write!(buf, ")");
} }
match path.type_anchor() { match path.type_anchor() {
Some(anchor) => { Some(anchor) => {
@ -44,7 +80,7 @@ pub(crate) fn print_path(db: &dyn ExpandDatabase, path: &Path, buf: &mut dyn Wri
write!(buf, "::")?; write!(buf, "::")?;
} }
write!(buf, "{}", segment.name.display(db))?; write!(buf, "{}", segment.name.display(db.upcast()))?;
if let Some(generics) = segment.args_and_bindings { if let Some(generics) = segment.args_and_bindings {
write!(buf, "::<")?; write!(buf, "::<")?;
print_generic_args(db, generics, buf)?; print_generic_args(db, generics, buf)?;
@ -57,7 +93,7 @@ pub(crate) fn print_path(db: &dyn ExpandDatabase, path: &Path, buf: &mut dyn Wri
} }
pub(crate) fn print_generic_args( pub(crate) fn print_generic_args(
db: &dyn ExpandDatabase, db: &dyn DefDatabase,
generics: &GenericArgs, generics: &GenericArgs,
buf: &mut dyn Write, buf: &mut dyn Write,
) -> fmt::Result { ) -> fmt::Result {
@ -83,7 +119,7 @@ pub(crate) fn print_generic_args(
write!(buf, ", ")?; write!(buf, ", ")?;
} }
first = false; first = false;
write!(buf, "{}", binding.name.display(db))?; write!(buf, "{}", binding.name.display(db.upcast()))?;
if !binding.bounds.is_empty() { if !binding.bounds.is_empty() {
write!(buf, ": ")?; write!(buf, ": ")?;
print_type_bounds(db, &binding.bounds, buf)?; print_type_bounds(db, &binding.bounds, buf)?;
@ -97,19 +133,19 @@ pub(crate) fn print_generic_args(
} }
pub(crate) fn print_generic_arg( pub(crate) fn print_generic_arg(
db: &dyn ExpandDatabase, db: &dyn DefDatabase,
arg: &GenericArg, arg: &GenericArg,
buf: &mut dyn Write, buf: &mut dyn Write,
) -> fmt::Result { ) -> fmt::Result {
match arg { match arg {
GenericArg::Type(ty) => print_type_ref(db, ty, buf), GenericArg::Type(ty) => print_type_ref(db, ty, buf),
GenericArg::Const(c) => write!(buf, "{}", c.display(db)), GenericArg::Const(c) => write!(buf, "{}", c.display(db.upcast())),
GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db)), GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast())),
} }
} }
pub(crate) fn print_type_ref( pub(crate) fn print_type_ref(
db: &dyn ExpandDatabase, db: &dyn DefDatabase,
type_ref: &TypeRef, type_ref: &TypeRef,
buf: &mut dyn Write, buf: &mut dyn Write,
) -> fmt::Result { ) -> fmt::Result {
@ -143,7 +179,7 @@ pub(crate) fn print_type_ref(
}; };
write!(buf, "&")?; write!(buf, "&")?;
if let Some(lt) = lt { if let Some(lt) = lt {
write!(buf, "{} ", lt.name.display(db))?; write!(buf, "{} ", lt.name.display(db.upcast()))?;
} }
write!(buf, "{mtbl}")?; write!(buf, "{mtbl}")?;
print_type_ref(db, pointee, buf)?; print_type_ref(db, pointee, buf)?;
@ -151,7 +187,7 @@ pub(crate) fn print_type_ref(
TypeRef::Array(elem, len) => { TypeRef::Array(elem, len) => {
write!(buf, "[")?; write!(buf, "[")?;
print_type_ref(db, elem, buf)?; print_type_ref(db, elem, buf)?;
write!(buf, "; {}]", len.display(db))?; write!(buf, "; {}]", len.display(db.upcast()))?;
} }
TypeRef::Slice(elem) => { TypeRef::Slice(elem) => {
write!(buf, "[")?; write!(buf, "[")?;
@ -198,7 +234,7 @@ pub(crate) fn print_type_ref(
} }
pub(crate) fn print_type_bounds( pub(crate) fn print_type_bounds(
db: &dyn ExpandDatabase, db: &dyn DefDatabase,
bounds: &[Interned<TypeBound>], bounds: &[Interned<TypeBound>],
buf: &mut dyn Write, buf: &mut dyn Write,
) -> fmt::Result { ) -> fmt::Result {
@ -216,10 +252,14 @@ pub(crate) fn print_type_bounds(
print_path(db, path, buf)?; print_path(db, path, buf)?;
} }
TypeBound::ForLifetime(lifetimes, path) => { TypeBound::ForLifetime(lifetimes, path) => {
write!(buf, "for<{}> ", lifetimes.iter().map(|it| it.display(db)).format(", "))?; write!(
buf,
"for<{}> ",
lifetimes.iter().map(|it| it.display(db.upcast())).format(", ")
)?;
print_path(db, path, buf)?; print_path(db, path, buf)?;
} }
TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name.display(db))?, TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast()))?,
TypeBound::Error => write!(buf, "{{unknown}}")?, TypeBound::Error => write!(buf, "{{unknown}}")?,
} }
} }

View file

@ -156,9 +156,8 @@ impl Resolver {
) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>)> { ) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>)> {
let path = match path { let path = match path {
Path::Normal { mod_path, .. } => mod_path, Path::Normal { mod_path, .. } => mod_path,
Path::LangItem(l) => { Path::LangItem(l, seg) => {
return Some(( let type_ns = match *l {
match *l {
LangItemTarget::Union(it) => TypeNs::AdtId(it.into()), LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it), LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it),
LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()), LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()),
@ -168,10 +167,8 @@ impl Resolver {
LangItemTarget::Function(_) LangItemTarget::Function(_)
| LangItemTarget::ImplDef(_) | LangItemTarget::ImplDef(_)
| LangItemTarget::Static(_) => return None, | LangItemTarget::Static(_) => return None,
}, };
None, return Some((type_ns, seg.as_ref().map(|_| 1), None));
None,
))
} }
}; };
let first_name = path.segments().first()?; let first_name = path.segments().first()?;
@ -256,7 +253,7 @@ impl Resolver {
) -> Option<ResolveValueResult> { ) -> Option<ResolveValueResult> {
let path = match path { let path = match path {
Path::Normal { mod_path, .. } => mod_path, Path::Normal { mod_path, .. } => mod_path,
Path::LangItem(l) => { Path::LangItem(l, None) => {
return Some(ResolveValueResult::ValueNs( return Some(ResolveValueResult::ValueNs(
match *l { match *l {
LangItemTarget::Function(it) => ValueNs::FunctionId(it), LangItemTarget::Function(it) => ValueNs::FunctionId(it),
@ -272,6 +269,20 @@ impl Resolver {
None, None,
)) ))
} }
Path::LangItem(l, Some(_)) => {
let type_ns = match *l {
LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it),
LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()),
LangItemTarget::EnumVariant(it) => TypeNs::EnumVariantId(it),
LangItemTarget::EnumId(it) => TypeNs::AdtId(it.into()),
LangItemTarget::Trait(it) => TypeNs::TraitId(it),
LangItemTarget::Function(_)
| LangItemTarget::ImplDef(_)
| LangItemTarget::Static(_) => return None,
};
return Some(ResolveValueResult::Partial(type_ns, 1, None));
}
}; };
let n_segments = path.segments().len(); let n_segments = path.segments().len();
let tmp = name![self]; let tmp = name![self];

View file

@ -1,13 +1,9 @@
//! Builtin macro //! Builtin macro
use std::mem;
use ::tt::Ident;
use base_db::{AnchoredPath, Edition, FileId}; use base_db::{AnchoredPath, Edition, FileId};
use cfg::CfgExpr; use cfg::CfgExpr;
use either::Either; use either::Either;
use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap}; use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
use rustc_hash::FxHashMap;
use syntax::{ use syntax::{
ast::{self, AstToken}, ast::{self, AstToken},
SmolStr, SmolStr,
@ -97,11 +93,11 @@ register_builtin! {
(unreachable, Unreachable) => unreachable_expand, (unreachable, Unreachable) => unreachable_expand,
(log_syntax, LogSyntax) => log_syntax_expand, (log_syntax, LogSyntax) => log_syntax_expand,
(trace_macros, TraceMacros) => trace_macros_expand, (trace_macros, TraceMacros) => trace_macros_expand,
EAGER:
(format_args, FormatArgs) => format_args_expand, (format_args, FormatArgs) => format_args_expand,
(const_format_args, ConstFormatArgs) => format_args_expand, (const_format_args, ConstFormatArgs) => format_args_expand,
(format_args_nl, FormatArgsNl) => format_args_nl_expand, (format_args_nl, FormatArgsNl) => format_args_nl_expand,
EAGER:
(compile_error, CompileError) => compile_error_expand, (compile_error, CompileError) => compile_error_expand,
(concat, Concat) => concat_expand, (concat, Concat) => concat_expand,
(concat_idents, ConcatIdents) => concat_idents_expand, (concat_idents, ConcatIdents) => concat_idents_expand,
@ -247,152 +243,16 @@ fn format_args_expand_general(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
end_string: &str, // FIXME: Make use of this so that mir interpretation works properly
_end_string: &str,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let args = parse_exprs_with_sep(tt, ','); let pound = quote! {@PUNCT '#'};
let mut tt = tt.clone();
let expand_error = tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
ExpandResult::new(tt::Subtree::empty(), mbe::ExpandError::NoMatchingRule.into()); return ExpandResult::ok(quote! {
builtin #pound format_args #tt
let mut key_args = FxHashMap::default();
let mut args = args.into_iter().filter_map(|mut arg| {
// Remove `key =`.
if matches!(arg.token_trees.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=')
{
// but not with `==`
if !matches!(arg.token_trees.get(2), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=')
{
let key = arg.token_trees.drain(..2).next().unwrap();
key_args.insert(key.to_string(), arg);
return None;
}
}
Some(arg)
}).collect::<Vec<_>>().into_iter();
// ^^^^^^^ we need this collect, to enforce the side effect of the filter_map closure (building the `key_args`)
let Some(format_subtree) = args.next() else {
return expand_error;
};
let format_string = (|| {
let token_tree = format_subtree.token_trees.get(0)?;
match token_tree {
tt::TokenTree::Leaf(l) => match l {
tt::Leaf::Literal(l) => {
if let Some(mut text) = l.text.strip_prefix('r') {
let mut raw_sharps = String::new();
while let Some(t) = text.strip_prefix('#') {
text = t;
raw_sharps.push('#');
}
text =
text.strip_suffix(&raw_sharps)?.strip_prefix('"')?.strip_suffix('"')?;
Some((text, l.span, Some(raw_sharps)))
} else {
let text = l.text.strip_prefix('"')?.strip_suffix('"')?;
let span = l.span;
Some((text, span, None))
}
}
_ => None,
},
tt::TokenTree::Subtree(_) => None,
}
})();
let Some((format_string, _format_string_span, raw_sharps)) = format_string else {
return expand_error;
};
let mut format_iter = format_string.chars().peekable();
let mut parts = vec![];
let mut last_part = String::new();
let mut arg_tts = vec![];
let mut err = None;
while let Some(c) = format_iter.next() {
// Parsing the format string. See https://doc.rust-lang.org/std/fmt/index.html#syntax for the grammar and more info
match c {
'{' => {
if format_iter.peek() == Some(&'{') {
format_iter.next();
last_part.push('{');
continue;
}
let mut argument = String::new();
while ![Some(&'}'), Some(&':')].contains(&format_iter.peek()) {
argument.push(match format_iter.next() {
Some(c) => c,
None => return expand_error,
}); });
} }
let format_spec = match format_iter.next().unwrap() {
'}' => "".to_owned(),
':' => {
let mut s = String::new();
while let Some(c) = format_iter.next() {
if c == '}' {
break;
}
s.push(c);
}
s
}
_ => unreachable!(),
};
parts.push(mem::take(&mut last_part));
let arg_tree = if argument.is_empty() {
match args.next() {
Some(it) => it,
None => {
err = Some(mbe::ExpandError::NoMatchingRule.into());
tt::Subtree::empty()
}
}
} else if let Some(tree) = key_args.get(&argument) {
tree.clone()
} else {
// FIXME: we should pick the related substring of the `_format_string_span` as the span. You
// can use `.char_indices()` instead of `.char()` for `format_iter` to find the substring interval.
let ident = Ident::new(argument, tt::TokenId::unspecified());
quote!(#ident)
};
let formatter = match &*format_spec {
"?" => quote!(::core::fmt::Debug::fmt),
"" => quote!(::core::fmt::Display::fmt),
_ => {
// FIXME: implement the rest and return expand error here
quote!(::core::fmt::Display::fmt)
}
};
arg_tts.push(quote! { ::core::fmt::ArgumentV1::new(&(#arg_tree), #formatter), });
}
'}' => {
if format_iter.peek() == Some(&'}') {
format_iter.next();
last_part.push('}');
} else {
return expand_error;
}
}
_ => last_part.push(c),
}
}
last_part += end_string;
if !last_part.is_empty() {
parts.push(last_part);
}
let part_tts = parts.into_iter().map(|it| {
let text = if let Some(raw) = &raw_sharps {
format!("r{raw}\"{}\"{raw}", it).into()
} else {
format!("\"{}\"", it).into()
};
let l = tt::Literal { span: tt::TokenId::unspecified(), text };
quote!(#l ,)
});
let arg_tts = arg_tts.into_iter().flat_map(|arg| arg.token_trees);
let expanded = quote! {
::core::fmt::Arguments::new_v1(&[##part_tts], &[##arg_tts])
};
ExpandResult { value: expanded, err }
}
fn asm_expand( fn asm_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
@ -415,10 +275,12 @@ fn asm_expand(
} }
} }
let expanded = quote! {{ let pound = quote! {@PUNCT '#'};
##literals let expanded = quote! {
loop {} builtin #pound asm (
}}; {##literals}
)
};
ExpandResult::ok(expanded) ExpandResult::ok(expanded)
} }

View file

@ -242,7 +242,7 @@ impl HygieneFrame {
krate, krate,
call_site: None, call_site: None,
def_site: None, def_site: None,
} };
}; };
let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id)); let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));

View file

@ -54,6 +54,12 @@ impl Name {
Name(Repr::Text(text)) Name(Repr::Text(text))
} }
// FIXME: See above, unfortunately some places really need this right now
#[doc(hidden)]
pub const fn new_text_dont_use(text: SmolStr) -> Name {
Name(Repr::Text(text))
}
pub fn new_tuple_field(idx: usize) -> Name { pub fn new_tuple_field(idx: usize) -> Name {
Name(Repr::TupleField(idx)) Name(Repr::TupleField(idx))
} }
@ -302,6 +308,16 @@ pub mod known {
rust_2018, rust_2018,
rust_2021, rust_2021,
v1, v1,
new_display,
new_debug,
new_lower_exp,
new_upper_exp,
new_octal,
new_pointer,
new_binary,
new_lower_hex,
new_upper_hex,
from_usize,
// Components of known path (type name) // Components of known path (type name)
Iterator, Iterator,
IntoIterator, IntoIterator,
@ -327,6 +343,13 @@ pub mod known {
Not, Not,
None, None,
Index, Index,
Left,
Right,
Center,
Unknown,
Is,
Param,
Implied,
// Components of known path (function name) // Components of known path (function name)
filter_map, filter_map,
next, next,
@ -335,6 +358,8 @@ pub mod known {
is_empty, is_empty,
as_str, as_str,
new, new,
new_v1_formatted,
none,
// Builtin macros // Builtin macros
asm, asm,
assert, assert,

View file

@ -32,7 +32,8 @@ once_cell = "1.17.0"
triomphe.workspace = true triomphe.workspace = true
nohash-hasher.workspace = true nohash-hasher.workspace = true
typed-arena = "2.0.1" typed-arena = "2.0.1"
rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false }
rustc_index.workspace = true
# local deps # local deps
stdx.workspace = true stdx.workspace = true

View file

@ -499,24 +499,26 @@ fn offset() {
r#" r#"
//- minicore: coerce_unsized, index, slice //- minicore: coerce_unsized, index, slice
extern "rust-intrinsic" { extern "rust-intrinsic" {
pub fn offset<T>(dst: *const T, offset: isize) -> *const T; pub fn offset<Ptr, Delta>(dst: Ptr, offset: Delta) -> Ptr;
pub fn arith_offset<T>(dst: *const T, offset: isize) -> *const T;
} }
const GOAL: u8 = unsafe { const GOAL: i32 = unsafe {
let ar: &[(u8, u8, u8)] = &[ let ar: &[(i32, i32, i32)] = &[
(10, 11, 12), (10, 11, 12),
(20, 21, 22), (20, 21, 22),
(30, 31, 32), (30, 31, 32),
(40, 41, 42), (40, 41, 42),
(50, 51, 52), (50, 51, 52),
]; ];
let ar: *const [(u8, u8, u8)] = ar; let ar: *const [(i32, i32, i32)] = ar;
let ar = ar as *const (u8, u8, u8); let ar = ar as *const (i32, i32, i32);
let element = *offset(ar, 2); let element3 = *offset(ar, 2usize);
element.1 let element4 = *arith_offset(ar, 3);
element3.1 * 100 + element4.0
}; };
"#, "#,
31, 3140,
); );
} }
@ -584,6 +586,24 @@ fn write_bytes() {
); );
} }
#[test]
fn write_via_move() {
check_number(
r#"
extern "rust-intrinsic" {
fn write_via_move<T>(ptr: *mut T, value: T);
}
const GOAL: i32 = unsafe {
let mut x = 2;
write_via_move(&mut x, 100);
x
};
"#,
100,
);
}
#[test] #[test]
fn copy() { fn copy() {
check_number( check_number(

View file

@ -163,25 +163,56 @@ impl<'a> DeclValidator<'a> {
|| allows.contains(allow::NONSTANDARD_STYLE) || allows.contains(allow::NONSTANDARD_STYLE)
}) })
}; };
let db = self.db.upcast();
is_allowed(id) let file_id_is_derive = || {
// go upwards one step or give up match id {
|| match id { AttrDefId::ModuleId(m) => {
AttrDefId::ModuleId(m) => m.containing_module(self.db.upcast()).map(|v| v.into()), m.def_map(db)[m.local_id].origin.file_id().map(Into::into)
AttrDefId::FunctionId(f) => Some(f.lookup(self.db.upcast()).container.into()), }
AttrDefId::StaticId(sid) => Some(sid.lookup(self.db.upcast()).container.into()), AttrDefId::FunctionId(f) => Some(f.lookup(db).id.file_id()),
AttrDefId::ConstId(cid) => Some(cid.lookup(self.db.upcast()).container.into()), AttrDefId::StaticId(sid) => Some(sid.lookup(db).id.file_id()),
AttrDefId::TraitId(tid) => Some(tid.lookup(self.db.upcast()).container.into()), AttrDefId::ConstId(cid) => Some(cid.lookup(db).id.file_id()),
AttrDefId::TraitAliasId(taid) => Some(taid.lookup(self.db.upcast()).container.into()), AttrDefId::TraitId(tid) => Some(tid.lookup(db).id.file_id()),
AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()), AttrDefId::TraitAliasId(taid) => Some(taid.lookup(db).id.file_id()),
AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()), AttrDefId::ImplId(iid) => Some(iid.lookup(db).id.file_id()),
AttrDefId::ExternCrateId(id) => Some(id.lookup(self.db.upcast()).container.into()), AttrDefId::ExternBlockId(id) => Some(id.lookup(db).id.file_id()),
AttrDefId::UseId(id) => Some(id.lookup(self.db.upcast()).container.into()), AttrDefId::ExternCrateId(id) => Some(id.lookup(db).id.file_id()),
AttrDefId::UseId(id) => Some(id.lookup(db).id.file_id()),
// These warnings should not explore macro definitions at all // These warnings should not explore macro definitions at all
AttrDefId::MacroId(_) => None, AttrDefId::MacroId(_) => None,
AttrDefId::AdtId(aid) => match aid { AttrDefId::AdtId(aid) => match aid {
AdtId::StructId(sid) => Some(sid.lookup(self.db.upcast()).container.into()), AdtId::StructId(sid) => Some(sid.lookup(db).id.file_id()),
AdtId::EnumId(eid) => Some(eid.lookup(self.db.upcast()).container.into()), AdtId::EnumId(eid) => Some(eid.lookup(db).id.file_id()),
// Unions aren't yet supported
AdtId::UnionId(_) => None,
},
AttrDefId::FieldId(_) => None,
AttrDefId::EnumVariantId(_) => None,
AttrDefId::TypeAliasId(_) => None,
AttrDefId::GenericParamId(_) => None,
}
.map_or(false, |file_id| {
file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast())
})
};
let parent = || {
match id {
AttrDefId::ModuleId(m) => m.containing_module(db).map(|v| v.into()),
AttrDefId::FunctionId(f) => Some(f.lookup(db).container.into()),
AttrDefId::StaticId(sid) => Some(sid.lookup(db).container.into()),
AttrDefId::ConstId(cid) => Some(cid.lookup(db).container.into()),
AttrDefId::TraitId(tid) => Some(tid.lookup(db).container.into()),
AttrDefId::TraitAliasId(taid) => Some(taid.lookup(db).container.into()),
AttrDefId::ImplId(iid) => Some(iid.lookup(db).container.into()),
AttrDefId::ExternBlockId(id) => Some(id.lookup(db).container.into()),
AttrDefId::ExternCrateId(id) => Some(id.lookup(db).container.into()),
AttrDefId::UseId(id) => Some(id.lookup(db).container.into()),
// These warnings should not explore macro definitions at all
AttrDefId::MacroId(_) => None,
AttrDefId::AdtId(aid) => match aid {
AdtId::StructId(sid) => Some(sid.lookup(db).container.into()),
AdtId::EnumId(eid) => Some(eid.lookup(db).container.into()),
// Unions aren't yet supported // Unions aren't yet supported
AdtId::UnionId(_) => None, AdtId::UnionId(_) => None,
}, },
@ -191,6 +222,12 @@ impl<'a> DeclValidator<'a> {
AttrDefId::GenericParamId(_) => None, AttrDefId::GenericParamId(_) => None,
} }
.is_some_and(|mid| self.allowed(mid, allow_name, true)) .is_some_and(|mid| self.allowed(mid, allow_name, true))
};
is_allowed(id)
// FIXME: this is a hack to avoid false positives in derive macros currently
|| file_id_is_derive()
// go upwards one step or give up
|| parent()
} }
fn validate_func(&mut self, func: FunctionId) { fn validate_func(&mut self, func: FunctionId) {

View file

@ -194,7 +194,8 @@ pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
#[derive(Debug, PartialEq, Eq, Clone)] #[derive(Debug, PartialEq, Eq, Clone)]
pub enum InferenceDiagnostic { pub enum InferenceDiagnostic {
NoSuchField { NoSuchField {
expr: ExprId, field: ExprOrPatId,
private: bool,
}, },
PrivateField { PrivateField {
expr: ExprId, expr: ExprId,
@ -228,6 +229,11 @@ pub enum InferenceDiagnostic {
expected: usize, expected: usize,
found: usize, found: usize,
}, },
MismatchedTupleStructPatArgCount {
pat: ExprOrPatId,
expected: usize,
found: usize,
},
ExpectedFunction { ExpectedFunction {
call_expr: ExprId, call_expr: ExprId,
found: Ty, found: Ty,

View file

@ -39,8 +39,14 @@ impl CastCheck {
} }
fn check_ref_to_ptr_cast(expr_ty: Ty, cast_ty: Ty, table: &mut InferenceTable<'_>) -> bool { fn check_ref_to_ptr_cast(expr_ty: Ty, cast_ty: Ty, table: &mut InferenceTable<'_>) -> bool {
let Some((expr_inner_ty, _, _)) = expr_ty.as_reference() else { return false; }; let Some((expr_inner_ty, _, _)) = expr_ty.as_reference() else {
let Some((cast_inner_ty, _)) = cast_ty.as_raw_ptr() else { return false; }; return false;
let TyKind::Array(expr_elt_ty, _) = expr_inner_ty.kind(Interner) else { return false; }; };
let Some((cast_inner_ty, _)) = cast_ty.as_raw_ptr() else {
return false;
};
let TyKind::Array(expr_elt_ty, _) = expr_inner_ty.kind(Interner) else {
return false;
};
table.coerce(expr_elt_ty, cast_inner_ty).is_ok() table.coerce(expr_elt_ty, cast_inner_ty).is_ok()
} }

View file

@ -452,6 +452,8 @@ impl InferenceContext<'_> {
fn walk_expr_without_adjust(&mut self, tgt_expr: ExprId) { fn walk_expr_without_adjust(&mut self, tgt_expr: ExprId) {
match &self.body[tgt_expr] { match &self.body[tgt_expr] {
Expr::OffsetOf(_) => (),
Expr::InlineAsm(e) => self.walk_expr_without_adjust(e.e),
Expr::If { condition, then_branch, else_branch } => { Expr::If { condition, then_branch, else_branch } => {
self.consume_expr(*condition); self.consume_expr(*condition);
self.consume_expr(*then_branch); self.consume_expr(*then_branch);
@ -467,13 +469,13 @@ impl InferenceContext<'_> {
Statement::Let { pat, type_ref: _, initializer, else_branch } => { Statement::Let { pat, type_ref: _, initializer, else_branch } => {
if let Some(else_branch) = else_branch { if let Some(else_branch) = else_branch {
self.consume_expr(*else_branch); self.consume_expr(*else_branch);
}
if let Some(initializer) = initializer { if let Some(initializer) = initializer {
if else_branch.is_some() {
self.consume_expr(*initializer); self.consume_expr(*initializer);
} } else {
return;
}
if let Some(initializer) = initializer {
self.walk_expr(*initializer); self.walk_expr(*initializer);
}
if let Some(place) = self.place_of_expr(*initializer) { if let Some(place) = self.place_of_expr(*initializer) {
self.consume_with_pat(place, *pat); self.consume_with_pat(place, *pat);
} }
@ -620,6 +622,7 @@ impl InferenceContext<'_> {
| Expr::Tuple { exprs, is_assignee_expr: _ } => { | Expr::Tuple { exprs, is_assignee_expr: _ } => {
self.consume_exprs(exprs.iter().copied()) self.consume_exprs(exprs.iter().copied())
} }
Expr::Missing Expr::Missing
| Expr::Continue { .. } | Expr::Continue { .. }
| Expr::Path(_) | Expr::Path(_)

View file

@ -514,9 +514,6 @@ impl InferenceContext<'_> {
} }
Expr::RecordLit { path, fields, spread, .. } => { Expr::RecordLit { path, fields, spread, .. } => {
let (ty, def_id) = self.resolve_variant(path.as_deref(), false); let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
if let Some(variant) = def_id {
self.write_variant_resolution(tgt_expr.into(), variant);
}
if let Some(t) = expected.only_has_type(&mut self.table) { if let Some(t) = expected.only_has_type(&mut self.table) {
self.unify(&ty, &t); self.unify(&ty, &t);
@ -526,27 +523,57 @@ impl InferenceContext<'_> {
.as_adt() .as_adt()
.map(|(_, s)| s.clone()) .map(|(_, s)| s.clone())
.unwrap_or_else(|| Substitution::empty(Interner)); .unwrap_or_else(|| Substitution::empty(Interner));
let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default(); if let Some(variant) = def_id {
let variant_data = def_id.map(|it| it.variant_data(self.db.upcast())); self.write_variant_resolution(tgt_expr.into(), variant);
}
match def_id {
_ if fields.is_empty() => {}
Some(def) => {
let field_types = self.db.field_types(def);
let variant_data = def.variant_data(self.db.upcast());
let visibilities = self.db.field_visibilities(def);
for field in fields.iter() { for field in fields.iter() {
let field_def = let field_def = {
variant_data.as_ref().and_then(|it| match it.field(&field.name) { match variant_data.field(&field.name) {
Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }), Some(local_id) => {
if !visibilities[local_id].is_visible_from(
self.db.upcast(),
self.resolver.module(),
) {
self.push_diagnostic(
InferenceDiagnostic::NoSuchField {
field: field.expr.into(),
private: true,
},
);
}
Some(local_id)
}
None => { None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField { self.push_diagnostic(InferenceDiagnostic::NoSuchField {
expr: field.expr, field: field.expr.into(),
private: false,
}); });
None None
} }
}); }
};
let field_ty = field_def.map_or(self.err_ty(), |it| { let field_ty = field_def.map_or(self.err_ty(), |it| {
field_types[it.local_id].clone().substitute(Interner, &substs) field_types[it].clone().substitute(Interner, &substs)
}); });
// Field type might have some unknown types // Field type might have some unknown types
// FIXME: we may want to emit a single type variable for all instance of type fields? // FIXME: we may want to emit a single type variable for all instance of type fields?
let field_ty = self.insert_type_vars(field_ty); let field_ty = self.insert_type_vars(field_ty);
self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty)); self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
} }
}
None => {
for field in fields.iter() {
self.infer_expr_coerce(field.expr, &Expectation::None);
}
}
}
if let Some(expr) = spread { if let Some(expr) = spread {
self.infer_expr(*expr, &Expectation::has_type(ty.clone())); self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
} }
@ -843,6 +870,11 @@ impl InferenceContext<'_> {
}); });
expected expected
} }
Expr::OffsetOf(_) => TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
Expr::InlineAsm(it) => {
self.infer_expr_no_expect(it.e);
self.result.standard_types.unit.clone()
}
}; };
// use a new type variable if we got unknown here // use a new type variable if we got unknown here
let ty = self.insert_type_vars_shallow(ty); let ty = self.insert_type_vars_shallow(ty);
@ -1122,7 +1154,7 @@ impl InferenceContext<'_> {
Expr::Underscore => rhs_ty.clone(), Expr::Underscore => rhs_ty.clone(),
_ => { _ => {
// `lhs` is a place expression, a unit struct, or an enum variant. // `lhs` is a place expression, a unit struct, or an enum variant.
let lhs_ty = self.infer_expr(lhs, &Expectation::none()); let lhs_ty = self.infer_expr_inner(lhs, &Expectation::none());
// This is the only branch where this function may coerce any type. // This is the only branch where this function may coerce any type.
// We are returning early to avoid the unifiability check below. // We are returning early to avoid the unifiability check below.

View file

@ -35,6 +35,8 @@ impl InferenceContext<'_> {
fn infer_mut_expr_without_adjust(&mut self, tgt_expr: ExprId, mutability: Mutability) { fn infer_mut_expr_without_adjust(&mut self, tgt_expr: ExprId, mutability: Mutability) {
match &self.body[tgt_expr] { match &self.body[tgt_expr] {
Expr::Missing => (), Expr::Missing => (),
Expr::InlineAsm(e) => self.infer_mut_expr_without_adjust(e.e, Mutability::Not),
Expr::OffsetOf(_) => (),
&Expr::If { condition, then_branch, else_branch } => { &Expr::If { condition, then_branch, else_branch } => {
self.infer_mut_expr(condition, Mutability::Not); self.infer_mut_expr(condition, Mutability::Not);
self.infer_mut_expr(then_branch, Mutability::Not); self.infer_mut_expr(then_branch, Mutability::Not);

View file

@ -15,7 +15,8 @@ use crate::{
infer::{BindingMode, Expectation, InferenceContext, TypeMismatch}, infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
lower::lower_to_chalk_mutability, lower::lower_to_chalk_mutability,
primitive::UintTy, primitive::UintTy,
static_lifetime, Interner, Scalar, Substitution, Ty, TyBuilder, TyExt, TyKind, static_lifetime, InferenceDiagnostic, Interner, Scalar, Substitution, Ty, TyBuilder, TyExt,
TyKind,
}; };
/// Used to generalize patterns and assignee expressions. /// Used to generalize patterns and assignee expressions.
@ -74,30 +75,69 @@ impl InferenceContext<'_> {
if let Some(variant) = def { if let Some(variant) = def {
self.write_variant_resolution(id.into(), variant); self.write_variant_resolution(id.into(), variant);
} }
if let Some(var) = &var_data {
let cmp = if ellipsis.is_some() { usize::gt } else { usize::ne };
if cmp(&subs.len(), &var.fields().len()) {
self.push_diagnostic(InferenceDiagnostic::MismatchedTupleStructPatArgCount {
pat: id.into(),
expected: var.fields().len(),
found: subs.len(),
});
}
}
self.unify(&ty, expected); self.unify(&ty, expected);
let substs = let substs =
ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner)); ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); match def {
_ if subs.len() == 0 => {}
Some(def) => {
let field_types = self.db.field_types(def);
let variant_data = def.variant_data(self.db.upcast());
let visibilities = self.db.field_visibilities(def);
let (pre, post) = match ellipsis { let (pre, post) = match ellipsis {
Some(idx) => subs.split_at(idx), Some(idx) => subs.split_at(idx),
None => (subs, &[][..]), None => (subs, &[][..]),
}; };
let post_idx_offset = field_tys.iter().count().saturating_sub(post.len()); let post_idx_offset = field_types.iter().count().saturating_sub(post.len());
let pre_iter = pre.iter().enumerate(); let pre_iter = pre.iter().enumerate();
let post_iter = (post_idx_offset..).zip(post.iter()); let post_iter = (post_idx_offset..).zip(post.iter());
for (i, &subpat) in pre_iter.chain(post_iter) { for (i, &subpat) in pre_iter.chain(post_iter) {
let expected_ty = var_data let field_def = {
.as_ref() match variant_data.field(&Name::new_tuple_field(i)) {
.and_then(|d| d.field(&Name::new_tuple_field(i))) Some(local_id) => {
.map_or(self.err_ty(), |field| { if !visibilities[local_id]
field_tys[field].clone().substitute(Interner, &substs) .is_visible_from(self.db.upcast(), self.resolver.module())
{
// FIXME(DIAGNOSE): private tuple field
}
Some(local_id)
}
None => None,
}
};
let expected_ty = field_def.map_or(self.err_ty(), |f| {
field_types[f].clone().substitute(Interner, &substs)
}); });
let expected_ty = self.normalize_associated_types_in(expected_ty); let expected_ty = self.normalize_associated_types_in(expected_ty);
T::infer(self, subpat, &expected_ty, default_bm); T::infer(self, subpat, &expected_ty, default_bm);
} }
}
None => {
let err_ty = self.err_ty();
for &inner in subs {
T::infer(self, inner, &err_ty, default_bm);
}
}
}
ty ty
} }
@ -109,7 +149,7 @@ impl InferenceContext<'_> {
expected: &Ty, expected: &Ty,
default_bm: T::BindingMode, default_bm: T::BindingMode,
id: T, id: T,
subs: impl Iterator<Item = (Name, T)>, subs: impl Iterator<Item = (Name, T)> + ExactSizeIterator,
) -> Ty { ) -> Ty {
let (ty, def) = self.resolve_variant(path, false); let (ty, def) = self.resolve_variant(path, false);
if let Some(variant) = def { if let Some(variant) = def {
@ -121,18 +161,52 @@ impl InferenceContext<'_> {
let substs = let substs =
ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner)); ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); match def {
let var_data = def.map(|it| it.variant_data(self.db.upcast())); _ if subs.len() == 0 => {}
Some(def) => {
let field_types = self.db.field_types(def);
let variant_data = def.variant_data(self.db.upcast());
let visibilities = self.db.field_visibilities(def);
for (name, inner) in subs { for (name, inner) in subs {
let expected_ty = var_data let field_def = {
.as_ref() match variant_data.field(&name) {
.and_then(|it| it.field(&name)) Some(local_id) => {
.map_or(self.err_ty(), |f| field_tys[f].clone().substitute(Interner, &substs)); if !visibilities[local_id]
.is_visible_from(self.db.upcast(), self.resolver.module())
{
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
field: inner.into(),
private: true,
});
}
Some(local_id)
}
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
field: inner.into(),
private: false,
});
None
}
}
};
let expected_ty = field_def.map_or(self.err_ty(), |f| {
field_types[f].clone().substitute(Interner, &substs)
});
let expected_ty = self.normalize_associated_types_in(expected_ty); let expected_ty = self.normalize_associated_types_in(expected_ty);
T::infer(self, inner, &expected_ty, default_bm); T::infer(self, inner, &expected_ty, default_bm);
} }
}
None => {
let err_ty = self.err_ty();
for (_, inner) in subs {
T::infer(self, inner, &err_ty, default_bm);
}
}
}
ty ty
} }

View file

@ -178,13 +178,30 @@ impl InferenceContext<'_> {
remaining_index: usize, remaining_index: usize,
id: ExprOrPatId, id: ExprOrPatId,
) -> Option<(ValueNs, Substitution)> { ) -> Option<(ValueNs, Substitution)> {
assert!(remaining_index < path.segments().len());
// there may be more intermediate segments between the resolved one and // there may be more intermediate segments between the resolved one and
// the end. Only the last segment needs to be resolved to a value; from // the end. Only the last segment needs to be resolved to a value; from
// the segments before that, we need to get either a type or a trait ref. // the segments before that, we need to get either a type or a trait ref.
let resolved_segment = path.segments().get(remaining_index - 1).unwrap(); let _d;
let remaining_segments = path.segments().skip(remaining_index); let (resolved_segment, remaining_segments) = match path {
Path::Normal { .. } => {
assert!(remaining_index < path.segments().len());
(
path.segments().get(remaining_index - 1).unwrap(),
path.segments().skip(remaining_index),
)
}
Path::LangItem(..) => (
PathSegment {
name: {
_d = hir_expand::name::known::Unknown;
&_d
},
args_and_bindings: None,
},
path.segments(),
),
};
let is_before_last = remaining_segments.len() == 1; let is_before_last = remaining_segments.len() == 1;
match (def, is_before_last) { match (def, is_before_last) {

View file

@ -24,7 +24,7 @@ pub use self::{
macro_rules! user_error { macro_rules! user_error {
($it: expr) => { ($it: expr) => {
return Err(LayoutError::UserError(format!($it))) return Err(LayoutError::UserError(format!($it).into()))
}; };
} }
@ -50,7 +50,7 @@ pub type Variants = hir_def::layout::Variants<RustcEnumVariantIdx>;
#[derive(Debug, PartialEq, Eq, Clone)] #[derive(Debug, PartialEq, Eq, Clone)]
pub enum LayoutError { pub enum LayoutError {
UserError(String), UserError(Box<str>),
SizeOverflow, SizeOverflow,
TargetLayoutNotAvailable, TargetLayoutNotAvailable,
HasPlaceholder, HasPlaceholder,
@ -109,7 +109,8 @@ fn layout_of_simd_ty(
// * the homogeneous field type and the number of fields. // * the homogeneous field type and the number of fields.
let (e_ty, e_len, is_array) = if let TyKind::Array(e_ty, _) = f0_ty.kind(Interner) { let (e_ty, e_len, is_array) = if let TyKind::Array(e_ty, _) = f0_ty.kind(Interner) {
// Extract the number of elements from the layout of the array field: // Extract the number of elements from the layout of the array field:
let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), env.clone())?.fields else { let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), env.clone())?.fields
else {
user_error!("Array with non array layout"); user_error!("Array with non array layout");
}; };
@ -233,9 +234,9 @@ pub fn layout_of_ty_query(
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)? cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
} }
TyKind::Array(element, count) => { TyKind::Array(element, count) => {
let count = try_const_usize(db, &count).ok_or(LayoutError::UserError( let count = try_const_usize(db, &count).ok_or(LayoutError::UserError(Box::from(
"unevaluated or mistyped const generic parameter".to_string(), "unevaluated or mistyped const generic parameter",
))? as u64; )))? as u64;
let element = db.layout_of_ty(element.clone(), trait_env.clone())?; let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?; let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;

View file

@ -163,7 +163,7 @@ fn repr_discr(
return Err(LayoutError::UserError( return Err(LayoutError::UserError(
"Integer::repr_discr: `#[repr]` hint too small for \ "Integer::repr_discr: `#[repr]` hint too small for \
discriminant range of enum " discriminant range of enum "
.to_string(), .into(),
)); ));
} }
return Ok((discr, ity.is_signed())); return Ok((discr, ity.is_signed()));

View file

@ -212,14 +212,14 @@ fn recursive() {
} }
check_fail( check_fail(
r#"struct Goal(Goal);"#, r#"struct Goal(Goal);"#,
LayoutError::UserError("infinite sized recursive type".to_string()), LayoutError::UserError("infinite sized recursive type".into()),
); );
check_fail( check_fail(
r#" r#"
struct Foo<T>(Foo<T>); struct Foo<T>(Foo<T>);
struct Goal(Foo<i32>); struct Goal(Foo<i32>);
"#, "#,
LayoutError::UserError("infinite sized recursive type".to_string()), LayoutError::UserError("infinite sized recursive type".into()),
); );
} }

View file

@ -255,3 +255,17 @@ fn ellipsis_pattern() {
} }
} }
} }
#[test]
fn regression_15623() {
size_and_align_expr! {
let a = 2;
let b = 3;
let c = 5;
move || {
let 0 = a else { return b; };
let y = c;
y
}
}
}

View file

@ -1,6 +1,6 @@
//! MIR definitions and implementation //! MIR definitions and implementation
use std::{fmt::Display, iter}; use std::{collections::hash_map::Entry, fmt::Display, iter};
use crate::{ use crate::{
consteval::usize_const, consteval::usize_const,
@ -37,6 +37,7 @@ pub use monomorphization::{
monomorphize_mir_body_bad, monomorphized_mir_body_for_closure_query, monomorphize_mir_body_bad, monomorphized_mir_body_for_closure_query,
monomorphized_mir_body_query, monomorphized_mir_body_recover, monomorphized_mir_body_query, monomorphized_mir_body_recover,
}; };
use rustc_hash::FxHashMap;
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use stdx::{impl_from, never}; use stdx::{impl_from, never};
use triomphe::Arc; use triomphe::Arc;
@ -165,8 +166,8 @@ impl<V, T> ProjectionElem<V, T> {
TyKind::Adt(_, subst) => { TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst) db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst)
} }
_ => { ty => {
never!("Only adt has field"); never!("Only adt has field, found {:?}", ty);
return TyKind::Error.intern(Interner); return TyKind::Error.intern(Interner);
} }
}, },
@ -223,35 +224,93 @@ impl<V, T> ProjectionElem<V, T> {
type PlaceElem = ProjectionElem<LocalId, Ty>; type PlaceElem = ProjectionElem<LocalId, Ty>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProjectionId(u32);
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ProjectionStore {
id_to_proj: FxHashMap<ProjectionId, Box<[PlaceElem]>>,
proj_to_id: FxHashMap<Box<[PlaceElem]>, ProjectionId>,
}
impl Default for ProjectionStore {
fn default() -> Self {
let mut this = Self { id_to_proj: Default::default(), proj_to_id: Default::default() };
// Ensure that [] will get the id 0 which is used in `ProjectionId::Empty`
this.intern(Box::new([]));
this
}
}
impl ProjectionStore {
fn shrink_to_fit(&mut self) {
self.id_to_proj.shrink_to_fit();
self.proj_to_id.shrink_to_fit();
}
fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
self.proj_to_id.get(projection).copied()
}
fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
let new_id = ProjectionId(self.proj_to_id.len() as u32);
match self.proj_to_id.entry(projection) {
Entry::Occupied(id) => *id.get(),
Entry::Vacant(e) => {
let key_clone = e.key().clone();
e.insert(new_id);
self.id_to_proj.insert(new_id, key_clone);
new_id
}
}
}
}
impl ProjectionId {
const EMPTY: ProjectionId = ProjectionId(0);
fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
store.id_to_proj.get(&self).unwrap()
}
fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
let mut current = self.lookup(store).to_vec();
current.push(projection);
store.intern(current.into())
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Place { pub struct Place {
pub local: LocalId, pub local: LocalId,
pub projection: Box<[PlaceElem]>, pub projection: ProjectionId,
} }
impl Place { impl Place {
fn is_parent(&self, child: &Place) -> bool { fn is_parent(&self, child: &Place, store: &ProjectionStore) -> bool {
self.local == child.local && child.projection.starts_with(&self.projection) self.local == child.local
&& child.projection.lookup(store).starts_with(&self.projection.lookup(store))
} }
/// The place itself is not included /// The place itself is not included
fn iterate_over_parents(&self) -> impl Iterator<Item = Place> + '_ { fn iterate_over_parents<'a>(
(0..self.projection.len()) &'a self,
.map(|x| &self.projection[0..x]) store: &'a ProjectionStore,
.map(|x| Place { local: self.local, projection: x.to_vec().into() }) ) -> impl Iterator<Item = Place> + 'a {
let projection = self.projection.lookup(store);
(0..projection.len()).map(|x| &projection[0..x]).filter_map(move |x| {
Some(Place { local: self.local, projection: store.intern_if_exist(x)? })
})
} }
fn project(&self, projection: PlaceElem) -> Place { fn project(&self, projection: PlaceElem, store: &mut ProjectionStore) -> Place {
Place { Place { local: self.local, projection: self.projection.project(projection, store) }
local: self.local,
projection: self.projection.iter().cloned().chain([projection]).collect(),
}
} }
} }
impl From<LocalId> for Place { impl From<LocalId> for Place {
fn from(local: LocalId) -> Self { fn from(local: LocalId) -> Self {
Self { local, projection: vec![].into() } Self { local, projection: ProjectionId::EMPTY }
} }
} }
@ -997,6 +1056,7 @@ pub struct BasicBlock {
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct MirBody { pub struct MirBody {
pub projection_store: ProjectionStore,
pub basic_blocks: Arena<BasicBlock>, pub basic_blocks: Arena<BasicBlock>,
pub locals: Arena<Local>, pub locals: Arena<Local>,
pub start_block: BasicBlockId, pub start_block: BasicBlockId,
@ -1009,11 +1069,15 @@ pub struct MirBody {
} }
impl MirBody { impl MirBody {
fn walk_places(&mut self, mut f: impl FnMut(&mut Place)) { fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) {
fn for_operand(op: &mut Operand, f: &mut impl FnMut(&mut Place)) { fn for_operand(
op: &mut Operand,
f: &mut impl FnMut(&mut Place, &mut ProjectionStore),
store: &mut ProjectionStore,
) {
match op { match op {
Operand::Copy(p) | Operand::Move(p) => { Operand::Copy(p) | Operand::Move(p) => {
f(p); f(p, store);
} }
Operand::Constant(_) | Operand::Static(_) => (), Operand::Constant(_) | Operand::Static(_) => (),
} }
@ -1022,30 +1086,30 @@ impl MirBody {
for statement in &mut block.statements { for statement in &mut block.statements {
match &mut statement.kind { match &mut statement.kind {
StatementKind::Assign(p, r) => { StatementKind::Assign(p, r) => {
f(p); f(p, &mut self.projection_store);
match r { match r {
Rvalue::ShallowInitBoxWithAlloc(_) => (), Rvalue::ShallowInitBoxWithAlloc(_) => (),
Rvalue::ShallowInitBox(o, _) Rvalue::ShallowInitBox(o, _)
| Rvalue::UnaryOp(_, o) | Rvalue::UnaryOp(_, o)
| Rvalue::Cast(_, o, _) | Rvalue::Cast(_, o, _)
| Rvalue::Repeat(o, _) | Rvalue::Repeat(o, _)
| Rvalue::Use(o) => for_operand(o, &mut f), | Rvalue::Use(o) => for_operand(o, &mut f, &mut self.projection_store),
Rvalue::CopyForDeref(p) Rvalue::CopyForDeref(p)
| Rvalue::Discriminant(p) | Rvalue::Discriminant(p)
| Rvalue::Len(p) | Rvalue::Len(p)
| Rvalue::Ref(_, p) => f(p), | Rvalue::Ref(_, p) => f(p, &mut self.projection_store),
Rvalue::CheckedBinaryOp(_, o1, o2) => { Rvalue::CheckedBinaryOp(_, o1, o2) => {
for_operand(o1, &mut f); for_operand(o1, &mut f, &mut self.projection_store);
for_operand(o2, &mut f); for_operand(o2, &mut f, &mut self.projection_store);
} }
Rvalue::Aggregate(_, ops) => { Rvalue::Aggregate(_, ops) => {
for op in ops.iter_mut() { for op in ops.iter_mut() {
for_operand(op, &mut f); for_operand(op, &mut f, &mut self.projection_store);
} }
} }
} }
} }
StatementKind::Deinit(p) => f(p), StatementKind::Deinit(p) => f(p, &mut self.projection_store),
StatementKind::StorageLive(_) StatementKind::StorageLive(_)
| StatementKind::StorageDead(_) | StatementKind::StorageDead(_)
| StatementKind::Nop => (), | StatementKind::Nop => (),
@ -1053,7 +1117,9 @@ impl MirBody {
} }
match &mut block.terminator { match &mut block.terminator {
Some(x) => match &mut x.kind { Some(x) => match &mut x.kind {
TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, &mut f), TerminatorKind::SwitchInt { discr, .. } => {
for_operand(discr, &mut f, &mut self.projection_store)
}
TerminatorKind::FalseEdge { .. } TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. } | TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Goto { .. } | TerminatorKind::Goto { .. }
@ -1063,23 +1129,24 @@ impl MirBody {
| TerminatorKind::Return | TerminatorKind::Return
| TerminatorKind::Unreachable => (), | TerminatorKind::Unreachable => (),
TerminatorKind::Drop { place, .. } => { TerminatorKind::Drop { place, .. } => {
f(place); f(place, &mut self.projection_store);
} }
TerminatorKind::DropAndReplace { place, value, .. } => { TerminatorKind::DropAndReplace { place, value, .. } => {
f(place); f(place, &mut self.projection_store);
for_operand(value, &mut f); for_operand(value, &mut f, &mut self.projection_store);
} }
TerminatorKind::Call { func, args, destination, .. } => { TerminatorKind::Call { func, args, destination, .. } => {
for_operand(func, &mut f); for_operand(func, &mut f, &mut self.projection_store);
args.iter_mut().for_each(|x| for_operand(x, &mut f)); args.iter_mut()
f(destination); .for_each(|x| for_operand(x, &mut f, &mut self.projection_store));
f(destination, &mut self.projection_store);
} }
TerminatorKind::Assert { cond, .. } => { TerminatorKind::Assert { cond, .. } => {
for_operand(cond, &mut f); for_operand(cond, &mut f, &mut self.projection_store);
} }
TerminatorKind::Yield { value, resume_arg, .. } => { TerminatorKind::Yield { value, resume_arg, .. } => {
for_operand(value, &mut f); for_operand(value, &mut f, &mut self.projection_store);
f(resume_arg); f(resume_arg, &mut self.projection_store);
} }
}, },
None => (), None => (),
@ -1096,7 +1163,9 @@ impl MirBody {
binding_locals, binding_locals,
param_locals, param_locals,
closures, closures,
projection_store,
} = self; } = self;
projection_store.shrink_to_fit();
basic_blocks.shrink_to_fit(); basic_blocks.shrink_to_fit();
locals.shrink_to_fit(); locals.shrink_to_fit();
binding_locals.shrink_to_fit(); binding_locals.shrink_to_fit();

View file

@ -42,30 +42,27 @@ pub struct BorrowckResult {
fn all_mir_bodies( fn all_mir_bodies(
db: &dyn HirDatabase, db: &dyn HirDatabase,
def: DefWithBodyId, def: DefWithBodyId,
) -> Box<dyn Iterator<Item = Result<Arc<MirBody>, MirLowerError>> + '_> { mut cb: impl FnMut(Arc<MirBody>),
) -> Result<(), MirLowerError> {
fn for_closure( fn for_closure(
db: &dyn HirDatabase, db: &dyn HirDatabase,
c: ClosureId, c: ClosureId,
) -> Box<dyn Iterator<Item = Result<Arc<MirBody>, MirLowerError>> + '_> { cb: &mut impl FnMut(Arc<MirBody>),
) -> Result<(), MirLowerError> {
match db.mir_body_for_closure(c) { match db.mir_body_for_closure(c) {
Ok(body) => { Ok(body) => {
let closures = body.closures.clone(); cb(body.clone());
Box::new( body.closures.iter().map(|&it| for_closure(db, it, cb)).collect()
iter::once(Ok(body))
.chain(closures.into_iter().flat_map(|it| for_closure(db, it))),
)
} }
Err(e) => Box::new(iter::once(Err(e))), Err(e) => Err(e),
} }
} }
match db.mir_body(def) { match db.mir_body(def) {
Ok(body) => { Ok(body) => {
let closures = body.closures.clone(); cb(body.clone());
Box::new( body.closures.iter().map(|&it| for_closure(db, it, &mut cb)).collect()
iter::once(Ok(body)).chain(closures.into_iter().flat_map(|it| for_closure(db, it))),
)
} }
Err(e) => Box::new(iter::once(Err(e))), Err(e) => Err(e),
} }
} }
@ -74,17 +71,15 @@ pub fn borrowck_query(
def: DefWithBodyId, def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult]>, MirLowerError> { ) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
let _p = profile::span("borrowck_query"); let _p = profile::span("borrowck_query");
let r = all_mir_bodies(db, def) let mut res = vec![];
.map(|body| { all_mir_bodies(db, def, |body| {
let body = body?; res.push(BorrowckResult {
Ok(BorrowckResult {
mutability_of_locals: mutability_of_locals(db, &body), mutability_of_locals: mutability_of_locals(db, &body),
moved_out_of_ref: moved_out_of_ref(db, &body), moved_out_of_ref: moved_out_of_ref(db, &body),
mir_body: body, mir_body: body,
}) });
}) })?;
.collect::<Result<Vec<_>, MirLowerError>>()?; Ok(res.into())
Ok(r.into())
} }
fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef> { fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef> {
@ -93,7 +88,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
Operand::Copy(p) | Operand::Move(p) => { Operand::Copy(p) | Operand::Move(p) => {
let mut ty: Ty = body.locals[p.local].ty.clone(); let mut ty: Ty = body.locals[p.local].ty.clone();
let mut is_dereference_of_ref = false; let mut is_dereference_of_ref = false;
for proj in &*p.projection { for proj in p.projection.lookup(&body.projection_store) {
if *proj == ProjectionElem::Deref && ty.as_reference().is_some() { if *proj == ProjectionElem::Deref && ty.as_reference().is_some() {
is_dereference_of_ref = true; is_dereference_of_ref = true;
} }
@ -125,6 +120,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
Operand::Constant(_) | Operand::Static(_) => (), Operand::Constant(_) | Operand::Static(_) => (),
}; };
for (_, block) in body.basic_blocks.iter() { for (_, block) in body.basic_blocks.iter() {
db.unwind_if_cancelled();
for statement in &block.statements { for statement in &block.statements {
match &statement.kind { match &statement.kind {
StatementKind::Assign(_, r) => match r { StatementKind::Assign(_, r) => match r {
@ -183,6 +179,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
None => (), None => (),
} }
} }
result.shrink_to_fit();
result result
} }
@ -199,7 +196,7 @@ enum ProjectionCase {
fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> ProjectionCase { fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> ProjectionCase {
let mut is_part_of = false; let mut is_part_of = false;
let mut ty = body.locals[lvalue.local].ty.clone(); let mut ty = body.locals[lvalue.local].ty.clone();
for proj in lvalue.projection.iter() { for proj in lvalue.projection.lookup(&body.projection_store).iter() {
match proj { match proj {
ProjectionElem::Deref if ty.as_adt().is_none() => return ProjectionCase::Indirect, // It's indirect in case of reference and raw ProjectionElem::Deref if ty.as_adt().is_none() => return ProjectionCase::Indirect, // It's indirect in case of reference and raw
ProjectionElem::Deref // It's direct in case of `Box<T>` ProjectionElem::Deref // It's direct in case of `Box<T>`
@ -258,7 +255,7 @@ fn ever_initialized_map(
for statement in &block.statements { for statement in &block.statements {
match &statement.kind { match &statement.kind {
StatementKind::Assign(p, _) => { StatementKind::Assign(p, _) => {
if p.projection.len() == 0 && p.local == l { if p.projection.lookup(&body.projection_store).len() == 0 && p.local == l {
is_ever_initialized = true; is_ever_initialized = true;
} }
} }
@ -277,21 +274,37 @@ fn ever_initialized_map(
); );
return; return;
}; };
let targets = match &terminator.kind { let mut process = |target, is_ever_initialized| {
TerminatorKind::Goto { target } => vec![*target], if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
TerminatorKind::SwitchInt { targets, .. } => targets.all_targets().to_vec(), result[target].insert(l, is_ever_initialized);
dfs(db, body, target, l, result);
}
};
match &terminator.kind {
TerminatorKind::Goto { target } => process(*target, is_ever_initialized),
TerminatorKind::SwitchInt { targets, .. } => {
targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized));
}
TerminatorKind::UnwindResume TerminatorKind::UnwindResume
| TerminatorKind::Abort | TerminatorKind::Abort
| TerminatorKind::Return | TerminatorKind::Return
| TerminatorKind::Unreachable => vec![], | TerminatorKind::Unreachable => (),
TerminatorKind::Call { target, cleanup, destination, .. } => { TerminatorKind::Call { target, cleanup, destination, .. } => {
if destination.projection.len() == 0 && destination.local == l { if destination.projection.lookup(&body.projection_store).len() == 0
&& destination.local == l
{
is_ever_initialized = true; is_ever_initialized = true;
} }
target.into_iter().chain(cleanup.into_iter()).copied().collect() target
.into_iter()
.chain(cleanup.into_iter())
.for_each(|&it| process(it, is_ever_initialized));
} }
TerminatorKind::Drop { target, unwind, place: _ } => { TerminatorKind::Drop { target, unwind, place: _ } => {
Some(target).into_iter().chain(unwind.into_iter()).copied().collect() iter::once(target)
.into_iter()
.chain(unwind.into_iter())
.for_each(|&it| process(it, is_ever_initialized));
} }
TerminatorKind::DropAndReplace { .. } TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. } | TerminatorKind::Assert { .. }
@ -300,13 +313,7 @@ fn ever_initialized_map(
| TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. } => { | TerminatorKind::FalseUnwind { .. } => {
never!("We don't emit these MIR terminators yet"); never!("We don't emit these MIR terminators yet");
vec![] ()
}
};
for target in targets {
if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
result[target].insert(l, is_ever_initialized);
dfs(db, body, target, l, result);
} }
} }
} }
@ -315,6 +322,7 @@ fn ever_initialized_map(
dfs(db, body, body.start_block, l, &mut result); dfs(db, body, body.start_block, l, &mut result);
} }
for l in body.locals.iter().map(|it| it.0) { for l in body.locals.iter().map(|it| it.0) {
db.unwind_if_cancelled();
if !result[body.start_block].contains_idx(l) { if !result[body.start_block].contains_idx(l) {
result[body.start_block].insert(l, false); result[body.start_block].insert(l, false);
dfs(db, body, body.start_block, l, &mut result); dfs(db, body, body.start_block, l, &mut result);
@ -384,7 +392,7 @@ fn mutability_of_locals(
| TerminatorKind::Assert { .. } | TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. } => (), | TerminatorKind::Yield { .. } => (),
TerminatorKind::Call { destination, .. } => { TerminatorKind::Call { destination, .. } => {
if destination.projection.len() == 0 { if destination.projection.lookup(&body.projection_store).len() == 0 {
if ever_init_map.get(destination.local).copied().unwrap_or_default() { if ever_init_map.get(destination.local).copied().unwrap_or_default() {
push_mut_span(destination.local, MirSpan::Unknown); push_mut_span(destination.local, MirSpan::Unknown);
} else { } else {

View file

@ -46,8 +46,8 @@ use crate::{
use super::{ use super::{
return_slot, AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError, return_slot, AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError,
MirSpan, Operand, Place, PlaceElem, ProjectionElem, Rvalue, StatementKind, TerminatorKind, MirSpan, Operand, Place, PlaceElem, ProjectionElem, ProjectionStore, Rvalue, StatementKind,
UnOp, TerminatorKind, UnOp,
}; };
mod shim; mod shim;
@ -215,9 +215,7 @@ impl Interval {
} }
fn write_from_interval(&self, memory: &mut Evaluator<'_>, interval: Interval) -> Result<()> { fn write_from_interval(&self, memory: &mut Evaluator<'_>, interval: Interval) -> Result<()> {
// FIXME: this could be more efficient memory.copy_from_interval(self.addr, interval)
let bytes = &interval.get(memory)?.to_vec();
memory.write_memory(self.addr, bytes)
} }
fn slice(self, range: Range<usize>) -> Interval { fn slice(self, range: Range<usize>) -> Interval {
@ -341,7 +339,7 @@ pub enum MirEvalError {
InvalidVTableId(usize), InvalidVTableId(usize),
CoerceUnsizedError(Ty), CoerceUnsizedError(Ty),
LangItemNotFound(LangItem), LangItemNotFound(LangItem),
BrokenLayout(Layout), BrokenLayout(Box<Layout>),
} }
impl MirEvalError { impl MirEvalError {
@ -410,7 +408,7 @@ impl MirEvalError {
err.pretty_print(f, db, span_formatter)?; err.pretty_print(f, db, span_formatter)?;
} }
MirEvalError::ConstEvalError(name, err) => { MirEvalError::ConstEvalError(name, err) => {
MirLowerError::ConstEvalError(name.clone(), err.clone()).pretty_print( MirLowerError::ConstEvalError((**name).into(), err.clone()).pretty_print(
f, f,
db, db,
span_formatter, span_formatter,
@ -485,17 +483,18 @@ struct DropFlags {
} }
impl DropFlags { impl DropFlags {
fn add_place(&mut self, p: Place) { fn add_place(&mut self, p: Place, store: &ProjectionStore) {
if p.iterate_over_parents().any(|it| self.need_drop.contains(&it)) { if p.iterate_over_parents(store).any(|it| self.need_drop.contains(&it)) {
return; return;
} }
self.need_drop.retain(|it| !p.is_parent(it)); self.need_drop.retain(|it| !p.is_parent(it, store));
self.need_drop.insert(p); self.need_drop.insert(p);
} }
fn remove_place(&mut self, p: &Place) -> bool { fn remove_place(&mut self, p: &Place, store: &ProjectionStore) -> bool {
// FIXME: replace parents with parts // FIXME: replace parents with parts
if let Some(parent) = p.iterate_over_parents().find(|it| self.need_drop.contains(&it)) { if let Some(parent) = p.iterate_over_parents(store).find(|it| self.need_drop.contains(&it))
{
self.need_drop.remove(&parent); self.need_drop.remove(&parent);
return true; return true;
} }
@ -656,7 +655,7 @@ impl Evaluator<'_> {
let mut addr = locals.ptr[p.local].addr; let mut addr = locals.ptr[p.local].addr;
let mut ty: Ty = locals.body.locals[p.local].ty.clone(); let mut ty: Ty = locals.body.locals[p.local].ty.clone();
let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
for proj in &*p.projection { for proj in p.projection.lookup(&locals.body.projection_store) {
let prev_ty = ty.clone(); let prev_ty = ty.clone();
ty = self.projected_ty(ty, proj.clone()); ty = self.projected_ty(ty, proj.clone());
match proj { match proj {
@ -837,7 +836,9 @@ impl Evaluator<'_> {
let addr = self.place_addr(l, &locals)?; let addr = self.place_addr(l, &locals)?;
let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?; let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?;
self.write_memory(addr, &result)?; self.write_memory(addr, &result)?;
locals.drop_flags.add_place(l.clone()); locals
.drop_flags
.add_place(l.clone(), &locals.body.projection_store);
} }
StatementKind::Deinit(_) => not_supported!("de-init statement"), StatementKind::Deinit(_) => not_supported!("de-init statement"),
StatementKind::StorageLive(_) StatementKind::StorageLive(_)
@ -889,7 +890,9 @@ impl Evaluator<'_> {
)?, )?,
it => not_supported!("unknown function type {it:?}"), it => not_supported!("unknown function type {it:?}"),
}; };
locals.drop_flags.add_place(destination.clone()); locals
.drop_flags
.add_place(destination.clone(), &locals.body.projection_store);
if let Some(stack_frame) = stack_frame { if let Some(stack_frame) = stack_frame {
self.code_stack.push(my_stack_frame); self.code_stack.push(my_stack_frame);
current_block_idx = stack_frame.locals.body.start_block; current_block_idx = stack_frame.locals.body.start_block;
@ -970,7 +973,7 @@ impl Evaluator<'_> {
) -> Result<()> { ) -> Result<()> {
let mut remain_args = body.param_locals.len(); let mut remain_args = body.param_locals.len();
for ((l, interval), value) in locals.ptr.iter().skip(1).zip(args) { for ((l, interval), value) in locals.ptr.iter().skip(1).zip(args) {
locals.drop_flags.add_place(l.into()); locals.drop_flags.add_place(l.into(), &locals.body.projection_store);
match value { match value {
IntervalOrOwned::Owned(value) => interval.write_from_bytes(self, &value)?, IntervalOrOwned::Owned(value) => interval.write_from_bytes(self, &value)?,
IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?, IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?,
@ -1629,7 +1632,7 @@ impl Evaluator<'_> {
if let Some((offset, size, value)) = tag { if let Some((offset, size, value)) = tag {
match result.get_mut(offset..offset + size) { match result.get_mut(offset..offset + size) {
Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]), Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]),
None => return Err(MirEvalError::BrokenLayout(variant_layout.clone())), None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))),
} }
} }
for (i, op) in values.enumerate() { for (i, op) in values.enumerate() {
@ -1637,7 +1640,7 @@ impl Evaluator<'_> {
let op = op.get(&self)?; let op = op.get(&self)?;
match result.get_mut(offset..offset + op.len()) { match result.get_mut(offset..offset + op.len()) {
Some(it) => it.copy_from_slice(op), Some(it) => it.copy_from_slice(op),
None => return Err(MirEvalError::BrokenLayout(variant_layout.clone())), None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))),
} }
} }
Ok(result) Ok(result)
@ -1646,7 +1649,7 @@ impl Evaluator<'_> {
fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<Interval> { fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<Interval> {
Ok(match it { Ok(match it {
Operand::Copy(p) | Operand::Move(p) => { Operand::Copy(p) | Operand::Move(p) => {
locals.drop_flags.remove_place(p); locals.drop_flags.remove_place(p, &locals.body.projection_store);
self.eval_place(p, locals)? self.eval_place(p, locals)?
} }
Operand::Static(st) => { Operand::Static(st) => {
@ -1760,6 +1763,48 @@ impl Evaluator<'_> {
Ok(()) Ok(())
} }
fn copy_from_interval(&mut self, addr: Address, r: Interval) -> Result<()> {
if r.size == 0 {
return Ok(());
}
let oob = || MirEvalError::UndefinedBehavior("out of bounds memory write".to_string());
match (addr, r.addr) {
(Stack(dst), Stack(src)) => {
if self.stack.len() < src + r.size || self.stack.len() < dst + r.size {
return Err(oob());
}
self.stack.copy_within(src..src + r.size, dst)
}
(Heap(dst), Heap(src)) => {
if self.stack.len() < src + r.size || self.stack.len() < dst + r.size {
return Err(oob());
}
self.heap.copy_within(src..src + r.size, dst)
}
(Stack(dst), Heap(src)) => {
self.stack
.get_mut(dst..dst + r.size)
.ok_or_else(oob)?
.copy_from_slice(self.heap.get(src..src + r.size).ok_or_else(oob)?);
}
(Heap(dst), Stack(src)) => {
self.heap
.get_mut(dst..dst + r.size)
.ok_or_else(oob)?
.copy_from_slice(self.stack.get(src..src + r.size).ok_or_else(oob)?);
}
_ => {
return Err(MirEvalError::UndefinedBehavior(format!(
"invalid memory write at address {addr:?}"
)))
}
}
Ok(())
}
fn size_align_of(&self, ty: &Ty, locals: &Locals) -> Result<Option<(usize, usize)>> { fn size_align_of(&self, ty: &Ty, locals: &Locals) -> Result<Option<(usize, usize)>> {
if let Some(layout) = self.layout_cache.borrow().get(ty) { if let Some(layout) = self.layout_cache.borrow().get(ty) {
return Ok(layout return Ok(layout
@ -2468,7 +2513,7 @@ impl Evaluator<'_> {
fn drop_place(&mut self, place: &Place, locals: &mut Locals, span: MirSpan) -> Result<()> { fn drop_place(&mut self, place: &Place, locals: &mut Locals, span: MirSpan) -> Result<()> {
let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?; let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?;
if !locals.drop_flags.remove_place(place) { if !locals.drop_flags.remove_place(place, &locals.body.projection_store) {
return Ok(()); return Ok(());
} }
let metadata = match metadata { let metadata = match metadata {

View file

@ -4,7 +4,10 @@
use std::cmp; use std::cmp;
use chalk_ir::TyKind; use chalk_ir::TyKind;
use hir_def::resolver::HasResolver; use hir_def::{
builtin_type::{BuiltinInt, BuiltinUint},
resolver::HasResolver,
};
use hir_expand::mod_path::ModPath; use hir_expand::mod_path::ModPath;
use super::*; use super::*;
@ -300,21 +303,36 @@ impl Evaluator<'_> {
BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_string())), BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_string())),
PanicFmt => { PanicFmt => {
let message = (|| { let message = (|| {
let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db.upcast()); let resolver = self
.db
.crate_def_map(self.crate_id)
.crate_root()
.resolver(self.db.upcast());
let Some(format_fn) = resolver.resolve_path_in_value_ns_fully( let Some(format_fn) = resolver.resolve_path_in_value_ns_fully(
self.db.upcast(), self.db.upcast(),
&hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments( &hir_def::path::Path::from_known_path_with_no_generic(
ModPath::from_segments(
hir_expand::mod_path::PathKind::Abs, hir_expand::mod_path::PathKind::Abs,
[name![std], name![fmt], name![format]].into_iter(), [name![std], name![fmt], name![format]].into_iter(),
)), ),
),
) else { ) else {
not_supported!("std::fmt::format not found"); not_supported!("std::fmt::format not found");
}; };
let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else { not_supported!("std::fmt::format is not a function") }; let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else {
let message_string = self.interpret_mir(self.db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?, args.map(|x| IntervalOrOwned::Owned(x.clone())))?; not_supported!("std::fmt::format is not a function")
let addr = Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?; };
let message_string = self.interpret_mir(
self.db
.mir_body(format_fn.into())
.map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
args.map(|x| IntervalOrOwned::Owned(x.clone())),
)?;
let addr =
Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?;
let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]); let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]);
Ok(std::string::String::from_utf8_lossy(self.read_memory(addr, size)?).into_owned()) Ok(std::string::String::from_utf8_lossy(self.read_memory(addr, size)?)
.into_owned())
})() })()
.unwrap_or_else(|e| format!("Failed to render panic format args: {e:?}")); .unwrap_or_else(|e| format!("Failed to render panic format args: {e:?}"));
Err(MirEvalError::Panic(message)) Err(MirEvalError::Panic(message))
@ -483,9 +501,7 @@ impl Evaluator<'_> {
} }
"syscall" => { "syscall" => {
let Some((id, rest)) = args.split_first() else { let Some((id, rest)) = args.split_first() else {
return Err(MirEvalError::TypeError( return Err(MirEvalError::TypeError("syscall arg1 is not provided"));
"syscall arg1 is not provided",
));
}; };
let id = from_bytes!(i64, id.get(self)?); let id = from_bytes!(i64, id.get(self)?);
self.exec_syscall(id, rest, destination, locals, span) self.exec_syscall(id, rest, destination, locals, span)
@ -710,7 +726,8 @@ impl Evaluator<'_> {
} }
match name { match name {
"size_of" => { "size_of" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else { else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided")); return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
}; };
@ -718,14 +735,17 @@ impl Evaluator<'_> {
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size]) destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
} }
"min_align_of" | "pref_align_of" => { "min_align_of" | "pref_align_of" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else { let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("align_of generic arg is not provided")); return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
}; };
let align = self.layout(ty)?.align.abi.bytes(); let align = self.layout(ty)?.align.abi.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size]) destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
} }
"size_of_val" => { "size_of_val" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else { else {
return Err(MirEvalError::TypeError("size_of_val generic arg is not provided")); return Err(MirEvalError::TypeError("size_of_val generic arg is not provided"));
}; };
@ -741,8 +761,12 @@ impl Evaluator<'_> {
} }
} }
"min_align_of_val" => { "min_align_of_val" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else { let Some(ty) =
return Err(MirEvalError::TypeError("min_align_of_val generic arg is not provided")); generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError(
"min_align_of_val generic arg is not provided",
));
}; };
let [arg] = args else { let [arg] = args else {
return Err(MirEvalError::TypeError("min_align_of_val args are not provided")); return Err(MirEvalError::TypeError("min_align_of_val args are not provided"));
@ -756,7 +780,8 @@ impl Evaluator<'_> {
} }
} }
"type_name" => { "type_name" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else { else {
return Err(MirEvalError::TypeError("type_name generic arg is not provided")); return Err(MirEvalError::TypeError("type_name generic arg is not provided"));
}; };
@ -779,7 +804,8 @@ impl Evaluator<'_> {
.write_from_bytes(self, &len.to_le_bytes()) .write_from_bytes(self, &len.to_le_bytes())
} }
"needs_drop" => { "needs_drop" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else { else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided")); return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
}; };
@ -831,9 +857,12 @@ impl Evaluator<'_> {
let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false)); let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false)); let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_sub(rhs); let ans = lhs.wrapping_sub(rhs);
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else { else {
return Err(MirEvalError::TypeError("ptr_offset_from generic arg is not provided")); return Err(MirEvalError::TypeError(
"ptr_offset_from generic arg is not provided",
));
}; };
let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128; let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128;
let ans = ans / size; let ans = ans / size;
@ -940,7 +969,8 @@ impl Evaluator<'_> {
"copy_nonoverlapping args are not provided", "copy_nonoverlapping args are not provided",
)); ));
}; };
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else { else {
return Err(MirEvalError::TypeError( return Err(MirEvalError::TypeError(
"copy_nonoverlapping generic arg is not provided", "copy_nonoverlapping generic arg is not provided",
@ -959,10 +989,46 @@ impl Evaluator<'_> {
let [ptr, offset] = args else { let [ptr, offset] = args else {
return Err(MirEvalError::TypeError("offset args are not provided")); return Err(MirEvalError::TypeError("offset args are not provided"));
}; };
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) let ty = if name == "offset" {
let Some(ty0) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else { else {
return Err(MirEvalError::TypeError("offset generic arg is not provided")); return Err(MirEvalError::TypeError("offset generic arg is not provided"));
}; };
let Some(ty1) =
generic_args.as_slice(Interner).get(1).and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("offset generic arg is not provided"));
};
if !matches!(
ty1.as_builtin(),
Some(
BuiltinType::Int(BuiltinInt::Isize)
| BuiltinType::Uint(BuiltinUint::Usize)
)
) {
return Err(MirEvalError::TypeError(
"offset generic arg is not usize or isize",
));
}
match ty0.as_raw_ptr() {
Some((ty, _)) => ty,
None => {
return Err(MirEvalError::TypeError(
"offset generic arg is not a raw pointer",
));
}
}
} else {
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError(
"arith_offset generic arg is not provided",
));
};
ty
};
let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false)); let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false));
let offset = u128::from_le_bytes(pad16(offset.get(self)?, false)); let offset = u128::from_le_bytes(pad16(offset.get(self)?, false));
let size = self.size_of_sized(ty, locals, "offset ptr type")? as u128; let size = self.size_of_sized(ty, locals, "offset ptr type")? as u128;
@ -1079,7 +1145,8 @@ impl Evaluator<'_> {
let [arg] = args else { let [arg] = args else {
return Err(MirEvalError::TypeError("discriminant_value arg is not provided")); return Err(MirEvalError::TypeError("discriminant_value arg is not provided"));
}; };
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else { else {
return Err(MirEvalError::TypeError( return Err(MirEvalError::TypeError(
"discriminant_value generic arg is not provided", "discriminant_value generic arg is not provided",
@ -1133,17 +1200,32 @@ impl Evaluator<'_> {
let addr = Address::from_bytes(arg.interval.get(self)?)?; let addr = Address::from_bytes(arg.interval.get(self)?)?;
destination.write_from_interval(self, Interval { addr, size: destination.size }) destination.write_from_interval(self, Interval { addr, size: destination.size })
} }
"write_via_move" => {
let [ptr, val] = args else {
return Err(MirEvalError::TypeError("write_via_move args are not provided"));
};
let dst = Address::from_bytes(ptr.get(self)?)?;
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError(
"write_via_copy generic arg is not provided",
));
};
let size = self.size_of_sized(ty, locals, "write_via_move ptr type")?;
Interval { addr: dst, size }.write_from_interval(self, val.interval)?;
Ok(())
}
"write_bytes" => { "write_bytes" => {
let [dst, val, count] = args else { let [dst, val, count] = args else {
return Err(MirEvalError::TypeError("write_bytes args are not provided")); return Err(MirEvalError::TypeError("write_bytes args are not provided"));
}; };
let count = from_bytes!(usize, count.get(self)?); let count = from_bytes!(usize, count.get(self)?);
let val = from_bytes!(u8, val.get(self)?); let val = from_bytes!(u8, val.get(self)?);
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else { else {
return Err(MirEvalError::TypeError( return Err(MirEvalError::TypeError("write_bytes generic arg is not provided"));
"write_bytes generic arg is not provided",
));
}; };
let dst = Address::from_bytes(dst.get(self)?)?; let dst = Address::from_bytes(dst.get(self)?)?;
let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?; let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;

View file

@ -45,7 +45,9 @@ impl Evaluator<'_> {
}; };
match try_const_usize(self.db, len) { match try_const_usize(self.db, len) {
Some(len) => { Some(len) => {
let Some(ty) = subst.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else { let Some(ty) =
subst.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("simd type with no ty param")); return Err(MirEvalError::TypeError("simd type with no ty param"));
}; };
Ok((len as usize, ty.clone())) Ok((len as usize, ty.clone()))

View file

@ -71,7 +71,7 @@ struct MirLowerCtx<'a> {
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum MirLowerError { pub enum MirLowerError {
ConstEvalError(String, Box<ConstEvalError>), ConstEvalError(Box<str>, Box<ConstEvalError>),
LayoutError(LayoutError), LayoutError(LayoutError),
IncompleteExpr, IncompleteExpr,
IncompletePattern, IncompletePattern,
@ -84,7 +84,7 @@ pub enum MirLowerError {
UnsizedTemporary(Ty), UnsizedTemporary(Ty),
MissingFunctionDefinition(DefWithBodyId, ExprId), MissingFunctionDefinition(DefWithBodyId, ExprId),
TypeMismatch(TypeMismatch), TypeMismatch(TypeMismatch),
/// This should be never happen. Type mismatch should catch everything. /// This should never happen. Type mismatch should catch everything.
TypeError(&'static str), TypeError(&'static str),
NotSupported(String), NotSupported(String),
ContinueWithoutLoop, ContinueWithoutLoop,
@ -244,6 +244,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let locals = Arena::new(); let locals = Arena::new();
let binding_locals: ArenaMap<BindingId, LocalId> = ArenaMap::new(); let binding_locals: ArenaMap<BindingId, LocalId> = ArenaMap::new();
let mir = MirBody { let mir = MirBody {
projection_store: ProjectionStore::default(),
basic_blocks, basic_blocks,
locals, locals,
start_block, start_block,
@ -370,6 +371,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
mut current: BasicBlockId, mut current: BasicBlockId,
) -> Result<Option<BasicBlockId>> { ) -> Result<Option<BasicBlockId>> {
match &self.body.exprs[expr_id] { match &self.body.exprs[expr_id] {
Expr::OffsetOf(_) => {
not_supported!("builtin#offset_of")
}
Expr::InlineAsm(_) => {
not_supported!("builtin#asm")
}
Expr::Missing => { Expr::Missing => {
if let DefWithBodyId::FunctionId(f) = self.owner { if let DefWithBodyId::FunctionId(f) = self.owner {
let assoc = f.lookup(self.db.upcast()); let assoc = f.lookup(self.db.upcast());
@ -803,10 +810,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
current = c; current = c;
operands[u32::from(field_id.into_raw()) as usize] = Some(op); operands[u32::from(field_id.into_raw()) as usize] = Some(op);
} }
self.push_assignment( let rvalue = Rvalue::Aggregate(
current,
place,
Rvalue::Aggregate(
AggregateKind::Adt(variant_id, subst), AggregateKind::Adt(variant_id, subst),
match spread_place { match spread_place {
Some(sp) => operands Some(sp) => operands
@ -815,13 +819,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
.map(|(i, it)| match it { .map(|(i, it)| match it {
Some(it) => it, Some(it) => it,
None => { None => {
let p = let p = sp.project(
sp.project(ProjectionElem::Field(FieldId { ProjectionElem::Field(FieldId {
parent: variant_id, parent: variant_id,
local_id: LocalFieldId::from_raw( local_id: LocalFieldId::from_raw(RawIdx::from(
RawIdx::from(i as u32), i as u32,
), )),
})); }),
&mut self.result.projection_store,
);
Operand::Copy(p) Operand::Copy(p)
} }
}) })
@ -830,9 +836,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
MirLowerError::TypeError("missing field in record literal"), MirLowerError::TypeError("missing field in record literal"),
)?, )?,
}, },
),
expr_id.into(),
); );
self.push_assignment(current, place, rvalue, expr_id.into());
Ok(Some(current)) Ok(Some(current))
} }
VariantId::UnionId(union_id) => { VariantId::UnionId(union_id) => {
@ -841,10 +846,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
}; };
let local_id = let local_id =
variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?; variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
let place = place.project(PlaceElem::Field(FieldId { let place = place.project(
parent: union_id.into(), PlaceElem::Field(FieldId { parent: union_id.into(), local_id }),
local_id, &mut self.result.projection_store,
})); );
self.lower_expr_to_place(*expr, place, current) self.lower_expr_to_place(*expr, place, current)
} }
} }
@ -898,7 +903,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
else { else {
return Ok(None); return Ok(None);
}; };
let p = place.project(ProjectionElem::Deref); let p = place.project(ProjectionElem::Deref, &mut self.result.projection_store);
self.push_assignment(current, p, operand.into(), expr_id.into()); self.push_assignment(current, p, operand.into(), expr_id.into());
Ok(Some(current)) Ok(Some(current))
} }
@ -1120,7 +1125,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
for capture in captures.iter() { for capture in captures.iter() {
let p = Place { let p = Place {
local: self.binding_local(capture.place.local)?, local: self.binding_local(capture.place.local)?,
projection: capture projection: self.result.projection_store.intern(
capture
.place .place
.projections .projections
.clone() .clone()
@ -1137,10 +1143,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
ProjectionElem::Subslice { from, to } => { ProjectionElem::Subslice { from, to } => {
ProjectionElem::Subslice { from, to } ProjectionElem::Subslice { from, to }
} }
ProjectionElem::OpaqueCast(it) => ProjectionElem::OpaqueCast(it), ProjectionElem::OpaqueCast(it) => {
ProjectionElem::OpaqueCast(it)
}
ProjectionElem::Index(it) => match it {}, ProjectionElem::Index(it) => match it {},
}) })
.collect(), .collect(),
),
}; };
match &capture.kind { match &capture.kind {
CaptureKind::ByRef(bk) => { CaptureKind::ByRef(bk) => {
@ -1201,7 +1210,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
let Some(values) = elements let Some(values) = elements
.iter() .iter()
.map(|it| { .map(|it| {
let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else { let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)?
else {
return Ok(None); return Ok(None);
}; };
current = c; current = c;
@ -1254,12 +1264,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
match &self.body.exprs[lhs] { match &self.body.exprs[lhs] {
Expr::Tuple { exprs, is_assignee_expr: _ } => { Expr::Tuple { exprs, is_assignee_expr: _ } => {
for (i, expr) in exprs.iter().enumerate() { for (i, expr) in exprs.iter().enumerate() {
let Some(c) = self.lower_destructing_assignment( let rhs = rhs.project(
current, ProjectionElem::TupleOrClosureField(i),
*expr, &mut self.result.projection_store,
rhs.project(ProjectionElem::TupleOrClosureField(i)), );
span, let Some(c) = self.lower_destructing_assignment(current, *expr, rhs, span)?
)? else { else {
return Ok(None); return Ok(None);
}; };
current = c; current = c;
@ -1268,8 +1278,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
} }
Expr::Underscore => Ok(Some(current)), Expr::Underscore => Ok(Some(current)),
_ => { _ => {
let Some((lhs_place, current)) = let Some((lhs_place, current)) = self.lower_expr_as_place(current, lhs, false)?
self.lower_expr_as_place(current, lhs, false)?
else { else {
return Ok(None); return Ok(None);
}; };
@ -1286,9 +1295,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
rhs: ExprId, rhs: ExprId,
span: MirSpan, span: MirSpan,
) -> Result<Option<BasicBlockId>> { ) -> Result<Option<BasicBlockId>> {
let Some((rhs_op, current)) = let Some((rhs_op, current)) = self.lower_expr_to_some_operand(rhs, current)? else {
self.lower_expr_to_some_operand(rhs, current)?
else {
return Ok(None); return Ok(None);
}; };
if matches!(&self.body.exprs[lhs], Expr::Underscore) { if matches!(&self.body.exprs[lhs], Expr::Underscore) {
@ -1303,9 +1310,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(current, temp.clone(), rhs_op.into(), span); self.push_assignment(current, temp.clone(), rhs_op.into(), span);
return self.lower_destructing_assignment(current, lhs, temp, span); return self.lower_destructing_assignment(current, lhs, temp, span);
} }
let Some((lhs_place, current)) = let Some((lhs_place, current)) = self.lower_expr_as_place(current, lhs, false)? else {
self.lower_expr_as_place(current, lhs, false)?
else {
return Ok(None); return Ok(None);
}; };
self.push_assignment(current, lhs_place, rhs_op.into(), span); self.push_assignment(current, lhs_place, rhs_op.into(), span);
@ -1320,17 +1325,21 @@ impl<'ctx> MirLowerCtx<'ctx> {
placeholder_subst placeholder_subst
} }
fn push_field_projection(&self, place: &mut Place, expr_id: ExprId) -> Result<()> { fn push_field_projection(&mut self, place: &mut Place, expr_id: ExprId) -> Result<()> {
if let Expr::Field { expr, name } = &self.body[expr_id] { if let Expr::Field { expr, name } = &self.body[expr_id] {
if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind(Interner) { if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind(Interner) {
let index = name let index = name
.as_tuple_index() .as_tuple_index()
.ok_or(MirLowerError::TypeError("named field on tuple"))?; .ok_or(MirLowerError::TypeError("named field on tuple"))?;
*place = place.project(ProjectionElem::TupleOrClosureField(index)) *place = place.project(
ProjectionElem::TupleOrClosureField(index),
&mut self.result.projection_store,
)
} else { } else {
let field = let field =
self.infer.field_resolution(expr_id).ok_or(MirLowerError::UnresolvedField)?; self.infer.field_resolution(expr_id).ok_or(MirLowerError::UnresolvedField)?;
*place = place.project(ProjectionElem::Field(field)); *place =
place.project(ProjectionElem::Field(field), &mut self.result.projection_store);
} }
} else { } else {
not_supported!("") not_supported!("")
@ -1447,7 +1456,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let name = const_id.name(self.db.upcast()); let name = const_id.name(self.db.upcast());
self.db self.db
.const_eval(const_id.into(), subst, None) .const_eval(const_id.into(), subst, None)
.map_err(|e| MirLowerError::ConstEvalError(name, Box::new(e)))? .map_err(|e| MirLowerError::ConstEvalError(name.into(), Box::new(e)))?
}; };
Ok(Operand::Constant(c)) Ok(Operand::Constant(c))
} }
@ -1844,7 +1853,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
data.name.display(self.db.upcast()), data.name.display(self.db.upcast()),
data.variants[variant.local_id].name.display(self.db.upcast()) data.variants[variant.local_id].name.display(self.db.upcast())
); );
Err(MirLowerError::ConstEvalError(name, Box::new(e))) Err(MirLowerError::ConstEvalError(name.into(), Box::new(e)))
} }
} }
} }
@ -1992,13 +2001,14 @@ pub fn mir_body_for_closure_query(
FnTrait::FnOnce => vec![], FnTrait::FnOnce => vec![],
FnTrait::FnMut | FnTrait::Fn => vec![ProjectionElem::Deref], FnTrait::FnMut | FnTrait::Fn => vec![ProjectionElem::Deref],
}; };
ctx.result.walk_places(|p| { ctx.result.walk_places(|p, store| {
if let Some(it) = upvar_map.get(&p.local) { if let Some(it) = upvar_map.get(&p.local) {
let r = it.iter().find(|it| { let r = it.iter().find(|it| {
if p.projection.len() < it.0.place.projections.len() { if p.projection.lookup(&store).len() < it.0.place.projections.len() {
return false; return false;
} }
for (it, y) in p.projection.iter().zip(it.0.place.projections.iter()) { for (it, y) in p.projection.lookup(&store).iter().zip(it.0.place.projections.iter())
{
match (it, y) { match (it, y) {
(ProjectionElem::Deref, ProjectionElem::Deref) => (), (ProjectionElem::Deref, ProjectionElem::Deref) => (),
(ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (), (ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (),
@ -2016,13 +2026,18 @@ pub fn mir_body_for_closure_query(
p.local = closure_local; p.local = closure_local;
let mut next_projs = closure_projection.clone(); let mut next_projs = closure_projection.clone();
next_projs.push(PlaceElem::TupleOrClosureField(it.1)); next_projs.push(PlaceElem::TupleOrClosureField(it.1));
let prev_projs = mem::take(&mut p.projection); let prev_projs = p.projection;
if it.0.kind != CaptureKind::ByValue { if it.0.kind != CaptureKind::ByValue {
next_projs.push(ProjectionElem::Deref); next_projs.push(ProjectionElem::Deref);
} }
next_projs next_projs.extend(
.extend(prev_projs.iter().cloned().skip(it.0.place.projections.len())); prev_projs
p.projection = next_projs.into(); .lookup(&store)
.iter()
.cloned()
.skip(it.0.place.projections.len()),
);
p.projection = store.intern(next_projs.into());
} }
None => err = Some(p.clone()), None => err = Some(p.clone()),
} }

View file

@ -70,7 +70,7 @@ impl MirLowerCtx<'_> {
else { else {
return Ok(None); return Ok(None);
}; };
it.0 = it.0.project(ProjectionElem::Deref); it.0 = it.0.project(ProjectionElem::Deref, &mut self.result.projection_store);
Ok(Some(it)) Ok(Some(it))
} }
Adjust::Deref(Some(od)) => { Adjust::Deref(Some(od)) => {
@ -152,7 +152,10 @@ impl MirLowerCtx<'_> {
Operand::Static(s).into(), Operand::Static(s).into(),
expr_id.into(), expr_id.into(),
); );
Ok(Some((temp.project(ProjectionElem::Deref), current))) Ok(Some((
temp.project(ProjectionElem::Deref, &mut self.result.projection_store),
current,
)))
} }
_ => try_rvalue(self), _ => try_rvalue(self),
} }
@ -203,7 +206,7 @@ impl MirLowerCtx<'_> {
else { else {
return Ok(None); return Ok(None);
}; };
r = r.project(ProjectionElem::Deref); r = r.project(ProjectionElem::Deref, &mut self.result.projection_store);
Ok(Some((r, current))) Ok(Some((r, current)))
} }
_ => try_rvalue(self), _ => try_rvalue(self),
@ -267,7 +270,8 @@ impl MirLowerCtx<'_> {
else { else {
return Ok(None); return Ok(None);
}; };
p_base = p_base.project(ProjectionElem::Index(l_index)); p_base = p_base
.project(ProjectionElem::Index(l_index), &mut self.result.projection_store);
Ok(Some((p_base, current))) Ok(Some((p_base, current)))
} }
_ => try_rvalue(self), _ => try_rvalue(self),
@ -308,7 +312,7 @@ impl MirLowerCtx<'_> {
else { else {
return Ok(None); return Ok(None);
}; };
result = result.project(ProjectionElem::Deref); result = result.project(ProjectionElem::Deref, &mut self.result.projection_store);
Ok(Some((result, current))) Ok(Some((result, current)))
} }
@ -363,7 +367,7 @@ impl MirLowerCtx<'_> {
else { else {
return Ok(None); return Ok(None);
}; };
result = result.project(ProjectionElem::Deref); result = result.project(ProjectionElem::Deref, &mut self.result.projection_store);
Ok(Some((result, current))) Ok(Some((result, current)))
} }
} }

View file

@ -81,13 +81,16 @@ impl MirLowerCtx<'_> {
mode: MatchingMode, mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> { ) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let cnt = self.infer.pat_adjustments.get(&pattern).map(|x| x.len()).unwrap_or_default(); let cnt = self.infer.pat_adjustments.get(&pattern).map(|x| x.len()).unwrap_or_default();
cond_place.projection = cond_place cond_place.projection = self.result.projection_store.intern(
cond_place
.projection .projection
.lookup(&self.result.projection_store)
.iter() .iter()
.cloned() .cloned()
.chain((0..cnt).map(|_| ProjectionElem::Deref)) .chain((0..cnt).map(|_| ProjectionElem::Deref))
.collect::<Vec<_>>() .collect::<Vec<_>>()
.into(); .into(),
);
Ok(match &self.body.pats[pattern] { Ok(match &self.body.pats[pattern] {
Pat::Missing => return Err(MirLowerError::IncompletePattern), Pat::Missing => return Err(MirLowerError::IncompletePattern),
Pat::Wild => (current, current_else), Pat::Wild => (current, current_else),
@ -262,20 +265,23 @@ impl MirLowerCtx<'_> {
} }
} }
for (i, &pat) in prefix.iter().enumerate() { for (i, &pat) in prefix.iter().enumerate() {
let next_place = (&mut cond_place).project(ProjectionElem::ConstantIndex { let next_place = (&mut cond_place).project(
offset: i as u64, ProjectionElem::ConstantIndex { offset: i as u64, from_end: false },
from_end: false, &mut self.result.projection_store,
}); );
(current, current_else) = (current, current_else) =
self.pattern_match_inner(current, current_else, next_place, pat, mode)?; self.pattern_match_inner(current, current_else, next_place, pat, mode)?;
} }
if let Some(slice) = slice { if let Some(slice) = slice {
if mode == MatchingMode::Bind { if mode == MatchingMode::Bind {
if let Pat::Bind { id, subpat: _ } = self.body[*slice] { if let Pat::Bind { id, subpat: _ } = self.body[*slice] {
let next_place = (&mut cond_place).project(ProjectionElem::Subslice { let next_place = (&mut cond_place).project(
ProjectionElem::Subslice {
from: prefix.len() as u64, from: prefix.len() as u64,
to: suffix.len() as u64, to: suffix.len() as u64,
}); },
&mut self.result.projection_store,
);
(current, current_else) = self.pattern_match_binding( (current, current_else) = self.pattern_match_binding(
id, id,
next_place, next_place,
@ -287,10 +293,10 @@ impl MirLowerCtx<'_> {
} }
} }
for (i, &pat) in suffix.iter().enumerate() { for (i, &pat) in suffix.iter().enumerate() {
let next_place = (&mut cond_place).project(ProjectionElem::ConstantIndex { let next_place = (&mut cond_place).project(
offset: i as u64, ProjectionElem::ConstantIndex { offset: i as u64, from_end: true },
from_end: true, &mut self.result.projection_store,
}); );
(current, current_else) = (current, current_else) =
self.pattern_match_inner(current, current_else, next_place, pat, mode)?; self.pattern_match_inner(current, current_else, next_place, pat, mode)?;
} }
@ -412,13 +418,11 @@ impl MirLowerCtx<'_> {
mode, mode,
)? )?
} }
Pat::Ref { pat, mutability: _ } => self.pattern_match_inner( Pat::Ref { pat, mutability: _ } => {
current, let cond_place =
current_else, cond_place.project(ProjectionElem::Deref, &mut self.result.projection_store);
cond_place.project(ProjectionElem::Deref), self.pattern_match_inner(current, current_else, cond_place, *pat, mode)?
*pat, }
mode,
)?,
Pat::Box { .. } => not_supported!("box pattern"), Pat::Box { .. } => not_supported!("box pattern"),
Pat::ConstBlock(_) => not_supported!("const block pattern"), Pat::ConstBlock(_) => not_supported!("const block pattern"),
}) })
@ -594,7 +598,7 @@ impl MirLowerCtx<'_> {
mode: MatchingMode, mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> { ) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
for (proj, arg) in args { for (proj, arg) in args {
let cond_place = cond_place.project(proj); let cond_place = cond_place.project(proj, &mut self.result.projection_store);
(current, current_else) = (current, current_else) =
self.pattern_match_inner(current, current_else, cond_place, arg, mode)?; self.pattern_match_inner(current, current_else, cond_place, arg, mode)?;
} }

View file

@ -329,7 +329,7 @@ impl<'a> MirPrettyCtx<'a> {
} }
} }
} }
f(self, p.local, &p.projection); f(self, p.local, &p.projection.lookup(&self.body.projection_store));
} }
fn operand(&mut self, r: &Operand) { fn operand(&mut self, r: &Operand) {

View file

@ -2,55 +2,6 @@ use expect_test::expect;
use super::{check, check_infer, check_no_mismatches, check_types}; use super::{check, check_infer, check_no_mismatches, check_types};
#[test]
fn infer_box() {
check_types(
r#"
//- /main.rs crate:main deps:std
fn test() {
let x = box 1;
let t = (x, box x, box &1, box [1]);
t;
} //^ (Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; 1]>)
//- /std.rs crate:std
#[prelude_import] use prelude::*;
mod prelude {}
mod boxed {
#[lang = "owned_box"]
pub struct Box<T: ?Sized> {
inner: *mut T,
}
}
"#,
);
}
#[test]
fn infer_box_with_allocator() {
check_types(
r#"
//- /main.rs crate:main deps:std
fn test() {
let x = box 1;
let t = (x, box x, box &1, box [1]);
t;
} //^ (Box<i32, {unknown}>, Box<Box<i32, {unknown}>, {unknown}>, Box<&i32, {unknown}>, Box<[i32; 1], {unknown}>)
//- /std.rs crate:std
#[prelude_import] use prelude::*;
mod boxed {
#[lang = "owned_box"]
pub struct Box<T: ?Sized, A: Allocator> {
inner: *mut T,
allocator: A,
}
}
"#,
);
}
#[test] #[test]
fn infer_adt_self() { fn infer_adt_self() {
check_types( check_types(
@ -2763,8 +2714,8 @@ impl<T> [T] {
} }
fn test() { fn test() {
let vec = <[_]>::into_vec(box [1i32]); let vec = <[_]>::into_vec(#[rustc_box] Box::new([1i32]));
let v: Vec<Box<dyn B>> = <[_]> :: into_vec(box [box Astruct]); let v: Vec<Box<dyn B>> = <[_]> :: into_vec(#[rustc_box] Box::new([#[rustc_box] Box::new(Astruct)]));
} }
trait B{} trait B{}
@ -2774,20 +2725,20 @@ impl B for Astruct {}
expect![[r#" expect![[r#"
604..608 'self': Box<[T], A> 604..608 'self': Box<[T], A>
637..669 '{ ... }': Vec<T, A> 637..669 '{ ... }': Vec<T, A>
683..796 '{ ...t]); }': () 683..853 '{ ...])); }': ()
693..696 'vec': Vec<i32, Global> 693..696 'vec': Vec<i32, Global>
699..714 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global> 699..714 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
699..726 '<[_]>:...1i32])': Vec<i32, Global> 699..745 '<[_]>:...i32]))': Vec<i32, Global>
715..725 'box [1i32]': Box<[i32; 1], Global> 715..744 '#[rust...1i32])': Box<[i32; 1], Global>
719..725 '[1i32]': [i32; 1] 737..743 '[1i32]': [i32; 1]
720..724 '1i32': i32 738..742 '1i32': i32
736..737 'v': Vec<Box<dyn B, Global>, Global> 755..756 'v': Vec<Box<dyn B, Global>, Global>
757..774 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global> 776..793 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
757..793 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global> 776..850 '<[_]> ...ct)]))': Vec<Box<dyn B, Global>, Global>
775..792 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global> 794..849 '#[rust...uct)])': Box<[Box<dyn B, Global>; 1], Global>
779..792 '[box Astruct]': [Box<dyn B, Global>; 1] 816..848 '[#[rus...ruct)]': [Box<dyn B, Global>; 1]
780..791 'box Astruct': Box<Astruct, Global> 817..847 '#[rust...truct)': Box<Astruct, Global>
784..791 'Astruct': Astruct 839..846 'Astruct': Astruct
"#]], "#]],
) )
} }
@ -3649,3 +3600,30 @@ fn main() {
"#, "#,
); );
} }
#[test]
fn offset_of() {
check_types(
r#"
fn main() {
builtin#offset_of((,), 0);
// ^^^^^^^^^^^^^^^^^^^^^^^^^ usize
}
"#,
);
}
#[test]
fn builtin_format_args() {
check(
r#"
//- minicore: fmt
fn main() {
let are = "are";
let count = 10;
builtin#format_args("hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: Arguments<'_>
}
"#,
);
}

View file

@ -162,16 +162,16 @@ unsafe impl Allocator for Global {}
#[lang = "owned_box"] #[lang = "owned_box"]
#[fundamental] #[fundamental]
pub struct Box<T: ?Sized, A: Allocator = Global>; pub struct Box<T: ?Sized, A: Allocator = Global>(T);
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {} impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
fn send() -> Box<dyn Future<Output = ()> + Send + 'static>{ fn send() -> Box<dyn Future<Output = ()> + Send + 'static>{
box async move {} Box(async move {})
} }
fn not_send() -> Box<dyn Future<Output = ()> + 'static> { fn not_send() -> Box<dyn Future<Output = ()> + 'static> {
box async move {} Box(async move {})
} }
"#, "#,
); );
@ -3057,7 +3057,7 @@ impl<T: ?Sized> core::ops::Deref for Box<T> {
fn foo() { fn foo() {
let s = None; let s = None;
let f: Box<dyn FnOnce(&Option<i32>)> = box (|ps| {}); let f: Box<dyn FnOnce(&Option<i32>)> = Box { inner: &mut (|ps| {}) };
f(&s); f(&s);
}"#, }"#,
expect![[r#" expect![[r#"
@ -3068,19 +3068,19 @@ fn foo() {
186..197 '*self.inner': T 186..197 '*self.inner': T
187..191 'self': &Box<T> 187..191 'self': &Box<T>
187..197 'self.inner': *mut T 187..197 'self.inner': *mut T
218..308 '{ ...&s); }': () 218..324 '{ ...&s); }': ()
228..229 's': Option<i32> 228..229 's': Option<i32>
232..236 'None': Option<i32> 232..236 'None': Option<i32>
246..247 'f': Box<dyn FnOnce(&Option<i32>)> 246..247 'f': Box<dyn FnOnce(&Option<i32>)>
281..294 'box (|ps| {})': Box<impl Fn(&Option<i32>)> 281..310 'Box { ... {}) }': Box<dyn FnOnce(&Option<i32>)>
286..293 '|ps| {}': impl Fn(&Option<i32>) 294..308 '&mut (|ps| {})': &mut impl Fn(&Option<i32>)
287..289 'ps': &Option<i32> 300..307 '|ps| {}': impl Fn(&Option<i32>)
291..293 '{}': () 301..303 'ps': &Option<i32>
300..301 'f': Box<dyn FnOnce(&Option<i32>)> 305..307 '{}': ()
300..305 'f(&s)': () 316..317 'f': Box<dyn FnOnce(&Option<i32>)>
302..304 '&s': &Option<i32> 316..321 'f(&s)': ()
303..304 's': Option<i32> 318..320 '&s': &Option<i32>
281..294: expected Box<dyn FnOnce(&Option<i32>)>, got Box<impl Fn(&Option<i32>)> 319..320 's': Option<i32>
"#]], "#]],
); );
} }

View file

@ -1,39 +1,27 @@
//! Attributes & documentation for hir types. //! Attributes & documentation for hir types.
use hir_def::{ use hir_def::{
attr::{AttrsWithOwner, Documentation}, attr::AttrsWithOwner,
item_scope::ItemInNs, item_scope::ItemInNs,
path::{ModPath, Path}, path::{ModPath, Path},
per_ns::Namespace, per_ns::Namespace,
resolver::{HasResolver, Resolver, TypeNs}, resolver::{HasResolver, Resolver, TypeNs},
AssocItemId, AttrDefId, GenericParamId, ModuleDefId, AssocItemId, AttrDefId, ModuleDefId,
}; };
use hir_expand::{hygiene::Hygiene, name::Name}; use hir_expand::{hygiene::Hygiene, name::Name};
use hir_ty::db::HirDatabase; use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode}; use syntax::{ast, AstNode};
use crate::{ use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, Enum, ExternCrateDecl, Field, Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
Function, GenericParam, Impl, LifetimeParam, Macro, Module, ModuleDef, Static, Struct, Trait, Field, Function, GenericParam, Impl, LifetimeParam, Macro, Module, ModuleDef, Static, Struct,
TraitAlias, TypeAlias, TypeParam, Union, Variant, VariantDef, Trait, TraitAlias, TypeAlias, TypeParam, Union, Variant, VariantDef,
}; };
pub trait HasAttrs { pub trait HasAttrs {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner; fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner;
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>; #[doc(hidden)]
fn resolve_doc_path( fn attr_id(self) -> AttrDefId;
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<Namespace>,
) -> Option<DocLinkDef>;
}
/// Subset of `ide_db::Definition` that doc links can resolve to.
pub enum DocLinkDef {
ModuleDef(ModuleDef),
Field(Field),
SelfType(Trait),
} }
macro_rules! impl_has_attrs { macro_rules! impl_has_attrs {
@ -43,18 +31,8 @@ macro_rules! impl_has_attrs {
let def = AttrDefId::$def_id(self.into()); let def = AttrDefId::$def_id(self.into());
db.attrs_with_owner(def) db.attrs_with_owner(def)
} }
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> { fn attr_id(self) -> AttrDefId {
let def = AttrDefId::$def_id(self.into()); AttrDefId::$def_id(self.into())
db.attrs(def).docs()
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<Namespace>
) -> Option<DocLinkDef> {
let def = AttrDefId::$def_id(self.into());
resolve_doc_path(db, def, link, ns)
} }
} }
)*}; )*};
@ -74,6 +52,7 @@ impl_has_attrs![
(Module, ModuleId), (Module, ModuleId),
(GenericParam, GenericParamId), (GenericParam, GenericParamId),
(Impl, ImplId), (Impl, ImplId),
(ExternCrateDecl, ExternCrateId),
]; ];
macro_rules! impl_has_attrs_enum { macro_rules! impl_has_attrs_enum {
@ -82,16 +61,8 @@ macro_rules! impl_has_attrs_enum {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
$enum::$variant(self).attrs(db) $enum::$variant(self).attrs(db)
} }
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> { fn attr_id(self) -> AttrDefId {
$enum::$variant(self).docs(db) $enum::$variant(self).attr_id()
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<Namespace>
) -> Option<DocLinkDef> {
$enum::$variant(self).resolve_doc_path(db, link, ns)
} }
} }
)*}; )*};
@ -108,70 +79,35 @@ impl HasAttrs for AssocItem {
AssocItem::TypeAlias(it) => it.attrs(db), AssocItem::TypeAlias(it) => it.attrs(db),
} }
} }
fn attr_id(self) -> AttrDefId {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
match self { match self {
AssocItem::Function(it) => it.docs(db), AssocItem::Function(it) => it.attr_id(),
AssocItem::Const(it) => it.docs(db), AssocItem::Const(it) => it.attr_id(),
AssocItem::TypeAlias(it) => it.docs(db), AssocItem::TypeAlias(it) => it.attr_id(),
} }
} }
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
match self {
AssocItem::Function(it) => it.resolve_doc_path(db, link, ns),
AssocItem::Const(it) => it.resolve_doc_path(db, link, ns),
AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
}
}
}
impl HasAttrs for ExternCrateDecl {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
let def = AttrDefId::ExternCrateId(self.into());
db.attrs_with_owner(def)
}
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
let crate_docs = self.resolved_crate(db)?.root_module().attrs(db).docs().map(String::from);
let def = AttrDefId::ExternCrateId(self.into());
let decl_docs = db.attrs(def).docs().map(String::from);
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(decl_docs), None) => Some(decl_docs),
(None, Some(crate_docs)) => Some(crate_docs),
(Some(mut decl_docs), Some(crate_docs)) => {
decl_docs.push('\n');
decl_docs.push('\n');
decl_docs += &crate_docs;
Some(decl_docs)
}
}
.map(Documentation::new)
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
let def = AttrDefId::ExternCrateId(self.into());
resolve_doc_path(db, def, link, ns)
}
} }
/// Resolves the item `link` points to in the scope of `def`. /// Resolves the item `link` points to in the scope of `def`.
fn resolve_doc_path( pub fn resolve_doc_path_on(
db: &dyn HirDatabase, db: &dyn HirDatabase,
def: AttrDefId, def: impl HasAttrs,
link: &str, link: &str,
ns: Option<Namespace>, ns: Option<Namespace>,
) -> Option<DocLinkDef> { ) -> Option<DocLinkDef> {
let resolver = match def { // AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
// AttrDefId::EnumVariantId(it) => it.parent.resolver(db.upcast()),
resolve_doc_path_on_(db, link, def.attr_id(), ns)
}
fn resolve_doc_path_on_(
db: &dyn HirDatabase,
link: &str,
attr_id: AttrDefId,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
let resolver = match attr_id {
AttrDefId::ModuleId(it) => it.resolver(db.upcast()), AttrDefId::ModuleId(it) => it.resolver(db.upcast()),
AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()), AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
AttrDefId::AdtId(it) => it.resolver(db.upcast()), AttrDefId::AdtId(it) => it.resolver(db.upcast()),
@ -187,12 +123,7 @@ fn resolve_doc_path(
AttrDefId::UseId(it) => it.resolver(db.upcast()), AttrDefId::UseId(it) => it.resolver(db.upcast()),
AttrDefId::MacroId(it) => it.resolver(db.upcast()), AttrDefId::MacroId(it) => it.resolver(db.upcast()),
AttrDefId::ExternCrateId(it) => it.resolver(db.upcast()), AttrDefId::ExternCrateId(it) => it.resolver(db.upcast()),
AttrDefId::GenericParamId(it) => match it { AttrDefId::GenericParamId(_) => return None,
GenericParamId::TypeParamId(it) => it.parent(),
GenericParamId::ConstParamId(it) => it.parent(),
GenericParamId::LifetimeParamId(it) => it.parent,
}
.resolver(db.upcast()),
}; };
let mut modpath = modpath_from_str(db, link)?; let mut modpath = modpath_from_str(db, link)?;

View file

@ -43,6 +43,7 @@ diagnostics![
MacroExpansionParseError, MacroExpansionParseError,
MalformedDerive, MalformedDerive,
MismatchedArgCount, MismatchedArgCount,
MismatchedTupleStructPatArgCount,
MissingFields, MissingFields,
MissingMatchArms, MissingMatchArms,
MissingUnsafe, MissingUnsafe,
@ -172,7 +173,8 @@ pub struct MalformedDerive {
#[derive(Debug)] #[derive(Debug)]
pub struct NoSuchField { pub struct NoSuchField {
pub field: InFile<AstPtr<ast::RecordExprField>>, pub field: InFile<Either<AstPtr<ast::RecordExprField>, AstPtr<ast::RecordPatField>>>,
pub private: bool,
} }
#[derive(Debug)] #[derive(Debug)]
@ -182,6 +184,13 @@ pub struct PrivateAssocItem {
pub item: AssocItem, pub item: AssocItem,
} }
#[derive(Debug)]
pub struct MismatchedTupleStructPatArgCount {
pub expr_or_pat: InFile<Either<AstPtr<ast::Expr>, AstPtr<ast::Pat>>>,
pub expected: usize,
pub found: usize,
}
#[derive(Debug)] #[derive(Debug)]
pub struct ExpectedFunction { pub struct ExpectedFunction {
pub call: InFile<AstPtr<ast::Expr>>, pub call: InFile<AstPtr<ast::Expr>>,

View file

@ -88,13 +88,14 @@ use triomphe::Arc;
use crate::db::{DefDatabase, HirDatabase}; use crate::db::{DefDatabase, HirDatabase};
pub use crate::{ pub use crate::{
attrs::{DocLinkDef, HasAttrs}, attrs::{resolve_doc_path_on, HasAttrs},
diagnostics::{ diagnostics::{
AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode, AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode,
IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError, IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError,
MacroExpansionParseError, MalformedDerive, MismatchedArgCount, MissingFields, MacroExpansionParseError, MalformedDerive, MismatchedArgCount,
MissingMatchArms, MissingUnsafe, MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, MismatchedTupleStructPatArgCount, MissingFields, MissingMatchArms, MissingUnsafe,
PrivateField, ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel, MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, PrivateField,
ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField, UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField,
UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule, UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule,
UnresolvedProcMacro, UnusedMut, UnresolvedProcMacro, UnusedMut,
@ -115,7 +116,7 @@ pub use crate::{
pub use { pub use {
cfg::{CfgAtom, CfgExpr, CfgOptions}, cfg::{CfgAtom, CfgExpr, CfgOptions},
hir_def::{ hir_def::{
attr::{builtin::AttributeTemplate, Attrs, AttrsWithOwner, Documentation}, attr::{builtin::AttributeTemplate, AttrSourceMap, Attrs, AttrsWithOwner},
data::adt::StructKind, data::adt::StructKind,
find_path::PrefixKind, find_path::PrefixKind,
import_map, import_map,
@ -130,7 +131,7 @@ pub use {
{AdtId, ModuleDefId}, {AdtId, ModuleDefId},
}, },
hir_expand::{ hir_expand::{
attrs::Attr, attrs::{Attr, AttrId},
name::{known, Name}, name::{known, Name},
ExpandResult, HirFileId, InFile, MacroFile, Origin, ExpandResult, HirFileId, InFile, MacroFile, Origin,
}, },
@ -563,8 +564,8 @@ impl Module {
emit_def_diagnostic(db, acc, diag); emit_def_diagnostic(db, acc, diag);
} }
for decl in self.declarations(db) { for def in self.declarations(db) {
match decl { match def {
ModuleDef::Module(m) => { ModuleDef::Module(m) => {
// Only add diagnostics from inline modules // Only add diagnostics from inline modules
if def_map[m.id.local_id].origin.is_inline() { if def_map[m.id.local_id].origin.is_inline() {
@ -575,7 +576,7 @@ impl Module {
for diag in db.trait_data_with_diagnostics(t.id).1.iter() { for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
emit_def_diagnostic(db, acc, diag); emit_def_diagnostic(db, acc, diag);
} }
acc.extend(decl.diagnostics(db)) acc.extend(def.diagnostics(db))
} }
ModuleDef::Adt(adt) => { ModuleDef::Adt(adt) => {
match adt { match adt {
@ -599,10 +600,10 @@ impl Module {
} }
} }
} }
acc.extend(decl.diagnostics(db)) acc.extend(def.diagnostics(db))
} }
ModuleDef::Macro(m) => emit_macro_def_diagnostics(db, acc, m), ModuleDef::Macro(m) => emit_macro_def_diagnostics(db, acc, m),
_ => acc.extend(decl.diagnostics(db)), _ => acc.extend(def.diagnostics(db)),
} }
} }
self.legacy_macros(db).into_iter().for_each(|m| emit_macro_def_diagnostics(db, acc, m)); self.legacy_macros(db).into_iter().for_each(|m| emit_macro_def_diagnostics(db, acc, m));
@ -1446,6 +1447,7 @@ impl DefWithBody {
} }
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) { pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
db.unwind_if_cancelled();
let krate = self.module(db).id.krate(); let krate = self.module(db).id.krate();
let (body, source_map) = db.body_with_source_map(self.into()); let (body, source_map) = db.body_with_source_map(self.into());
@ -1501,11 +1503,19 @@ impl DefWithBody {
let infer = db.infer(self.into()); let infer = db.infer(self.into());
let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1); let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic"); let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
for d in &infer.diagnostics { for d in &infer.diagnostics {
match d { match d {
&hir_ty::InferenceDiagnostic::NoSuchField { expr } => { &hir_ty::InferenceDiagnostic::NoSuchField { field: expr, private } => {
let field = source_map.field_syntax(expr); let expr_or_pat = match expr {
acc.push(NoSuchField { field }.into()) ExprOrPatId::ExprId(expr) => {
source_map.field_syntax(expr).map(Either::Left)
}
ExprOrPatId::PatId(pat) => {
source_map.pat_field_syntax(pat).map(Either::Right)
}
};
acc.push(NoSuchField { field: expr_or_pat, private }.into())
} }
&hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => { &hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
acc.push( acc.push(
@ -1521,10 +1531,7 @@ impl DefWithBody {
&hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => { &hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
let expr_or_pat = match id { let expr_or_pat = match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left), ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
ExprOrPatId::PatId(pat) => source_map ExprOrPatId::PatId(pat) => pat_syntax(pat).map(Either::Right),
.pat_syntax(pat)
.expect("unexpected synthetic")
.map(Either::Right),
}; };
let item = item.into(); let item = item.into();
acc.push(PrivateAssocItem { expr_or_pat, item }.into()) acc.push(PrivateAssocItem { expr_or_pat, item }.into())
@ -1596,6 +1603,23 @@ impl DefWithBody {
.into(), .into(),
) )
} }
&hir_ty::InferenceDiagnostic::MismatchedTupleStructPatArgCount {
pat,
expected,
found,
} => {
let expr_or_pat = match pat {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
ExprOrPatId::PatId(pat) => source_map
.pat_syntax(pat)
.expect("unexpected synthetic")
.map(|it| it.unwrap_left())
.map(Either::Right),
};
acc.push(
MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into(),
)
}
} }
} }
for (pat_or_expr, mismatch) in infer.type_mismatches() { for (pat_or_expr, mismatch) in infer.type_mismatches() {
@ -4838,3 +4862,10 @@ pub enum ItemContainer {
ExternBlock(), ExternBlock(),
Crate(CrateId), Crate(CrateId),
} }
/// Subset of `ide_db::Definition` that doc links can resolve to.
pub enum DocLinkDef {
ModuleDef(ModuleDef),
Field(Field),
SelfType(Trait),
}

View file

@ -127,165 +127,24 @@ impl<DB> fmt::Debug for Semantics<'_, DB> {
} }
} }
impl<'db, DB> ops::Deref for Semantics<'db, DB> {
type Target = SemanticsImpl<'db>;
fn deref(&self) -> &Self::Target {
&self.imp
}
}
impl<'db, DB: HirDatabase> Semantics<'db, DB> { impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn new(db: &DB) -> Semantics<'_, DB> { pub fn new(db: &DB) -> Semantics<'_, DB> {
let impl_ = SemanticsImpl::new(db); let impl_ = SemanticsImpl::new(db);
Semantics { db, imp: impl_ } Semantics { db, imp: impl_ }
} }
pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
self.imp.parse(file_id)
}
pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
self.imp.parse_or_expand(file_id)
}
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
self.imp.expand(macro_call)
}
/// If `item` has an attribute macro attached to it, expands it.
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
self.imp.expand_attr_macro(item)
}
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
self.imp.expand_derive_as_pseudo_attr_macro(attr)
}
pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> {
self.imp.resolve_derive_macro(derive)
}
pub fn expand_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<SyntaxNode>> {
self.imp.expand_derive_macro(derive)
}
pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
self.imp.is_attr_macro_call(item)
}
pub fn is_derive_annotated(&self, item: &ast::Adt) -> bool {
self.imp.is_derive_annotated(item)
}
/// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
/// expansion. `token_to_map` should be a token from the `speculative args` node.
pub fn speculative_expand(
&self,
actual_macro_call: &ast::MacroCall,
speculative_args: &ast::TokenTree,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
}
/// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
/// expansion. `token_to_map` should be a token from the `speculative args` node.
pub fn speculative_expand_attr_macro(
&self,
actual_macro_call: &ast::Item,
speculative_args: &ast::Item,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
}
pub fn speculative_expand_derive_as_pseudo_attr_macro(
&self,
actual_macro_call: &ast::Attr,
speculative_args: &ast::Attr,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
self.imp.speculative_expand_derive_as_pseudo_attr_macro(
actual_macro_call,
speculative_args,
token_to_map,
)
}
/// Descend the token into its macro call if it is part of one, returning the token in the
/// expansion that it is associated with. If `offset` points into the token's range, it will
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
self.imp.descend_into_macros_single(token, offset)
}
/// Descend the token into its macro call if it is part of one, returning the tokens in the
/// expansion that it is associated with. If `offset` points into the token's range, it will
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros(
&self,
token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
self.imp.descend_into_macros(token, offset)
}
/// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
///
/// Returns the original non descended token if none of the mapped counterparts have the same text.
pub fn descend_into_macros_with_same_text(
&self,
token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
self.imp.descend_into_macros_with_same_text(token, offset)
}
pub fn descend_into_macros_with_kind_preference(
&self,
token: SyntaxToken,
offset: TextSize,
) -> SyntaxToken {
self.imp.descend_into_macros_with_kind_preference(token, offset)
}
/// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
self.imp.descend_node_into_attributes(node)
}
/// Search for a definition's source and cache its syntax tree
pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
where
Def::Ast: AstNode,
{
self.imp.source(def)
}
pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId { pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
self.imp.find_file(syntax_node).file_id self.imp.find_file(syntax_node).file_id
} }
/// Attempts to map the node out of macro expanded files returning the original file range.
/// If upmapping is not possible, this will fall back to the range of the macro call of the
/// macro file the node resides in.
pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
self.imp.original_range(node)
}
/// Attempts to map the node out of macro expanded files returning the original file range.
pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
self.imp.original_range_opt(node)
}
/// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.imp.original_ast_node(node)
}
/// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
self.imp.original_syntax_node(node)
}
pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
self.imp.diagnostics_display_range(diagnostics)
}
pub fn token_ancestors_with_macros( pub fn token_ancestors_with_macros(
&self, &self,
token: SyntaxToken, token: SyntaxToken,
@ -293,19 +152,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it)) token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
} }
/// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
self.imp.ancestors_with_macros(node)
}
pub fn ancestors_at_offset_with_macros(
&self,
node: &SyntaxNode,
offset: TextSize,
) -> impl Iterator<Item = SyntaxNode> + '_ {
self.imp.ancestors_at_offset_with_macros(node, offset)
}
/// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*, /// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
/// search up until it is of the target AstNode type /// search up until it is of the target AstNode type
pub fn find_node_at_offset_with_macros<N: AstNode>( pub fn find_node_at_offset_with_macros<N: AstNode>(
@ -336,53 +182,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast)) self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
} }
pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
self.imp.resolve_lifetime_param(lifetime)
}
pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
self.imp.resolve_label(lifetime)
}
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
self.imp.resolve_type(ty)
}
pub fn resolve_trait(&self, trait_: &ast::Path) -> Option<Trait> {
self.imp.resolve_trait(trait_)
}
pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
self.imp.expr_adjustments(expr)
}
pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
self.imp.type_of_expr(expr)
}
pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
self.imp.type_of_pat(pat)
}
/// It also includes the changes that binding mode makes in the type. For example in
/// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
/// of this function is `&mut Option<T>`
pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
self.imp.type_of_binding_in_pat(pat)
}
pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
self.imp.type_of_self(param)
}
pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
self.imp.pattern_adjustments(pat)
}
pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
self.imp.binding_mode_of_pat(pat)
}
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.imp.resolve_method_call(call).map(Function::from) self.imp.resolve_method_call(call).map(Function::from)
} }
@ -417,61 +216,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_try_expr(try_expr).map(Function::from) self.imp.resolve_try_expr(try_expr).map(Function::from)
} }
pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
self.imp.resolve_method_call_as_callable(call)
}
pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
self.imp.resolve_field(field)
}
pub fn resolve_record_field(
&self,
field: &ast::RecordExprField,
) -> Option<(Field, Option<Local>, Type)> {
self.imp.resolve_record_field(field)
}
pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
self.imp.resolve_record_pat_field(field)
}
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
self.imp.resolve_macro_call(macro_call)
}
pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
self.imp.is_unsafe_macro_call(macro_call)
}
pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
self.imp.resolve_attr_macro_call(item)
}
pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
self.imp.resolve_path(path)
}
pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> { pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
self.imp.resolve_variant(record_lit).map(VariantDef::from) self.imp.resolve_variant(record_lit).map(VariantDef::from)
} }
pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
self.imp.resolve_bind_pat_to_const(pat)
}
pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
self.imp.record_literal_missing_fields(literal)
}
pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
self.imp.record_pattern_missing_fields(pattern)
}
pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
self.imp.to_def(src)
}
pub fn to_module_def(&self, file: FileId) -> Option<Module> { pub fn to_module_def(&self, file: FileId) -> Option<Module> {
self.imp.to_module_def(file).next() self.imp.to_module_def(file).next()
} }
@ -479,39 +227,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> { pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
self.imp.to_module_def(file) self.imp.to_module_def(file)
} }
pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
self.imp.scope(node)
}
pub fn scope_at_offset(
&self,
node: &SyntaxNode,
offset: TextSize,
) -> Option<SemanticsScope<'db>> {
self.imp.scope_at_offset(node, offset)
}
pub fn assert_contains_node(&self, node: &SyntaxNode) {
self.imp.assert_contains_node(node)
}
pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
self.imp.is_unsafe_method_call(method_call_expr)
}
pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
self.imp.is_unsafe_ref_expr(ref_expr)
}
pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
self.imp.is_unsafe_ident_pat(ident_pat)
}
/// Returns `true` if the `node` is inside an `unsafe` context.
pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
self.imp.is_inside_unsafe(expr)
}
} }
impl<'db> SemanticsImpl<'db> { impl<'db> SemanticsImpl<'db> {
@ -525,32 +240,33 @@ impl<'db> SemanticsImpl<'db> {
} }
} }
fn parse(&self, file_id: FileId) -> ast::SourceFile { pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
let tree = self.db.parse(file_id).tree(); let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), file_id.into()); self.cache(tree.syntax().clone(), file_id.into());
tree tree
} }
fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode { pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
let node = self.db.parse_or_expand(file_id); let node = self.db.parse_or_expand(file_id);
self.cache(node.clone(), file_id); self.cache(node.clone(), file_id);
node node
} }
fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?; let sa = self.analyze_no_infer(macro_call.syntax())?;
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?; let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
let node = self.parse_or_expand(file_id); let node = self.parse_or_expand(file_id);
Some(node) Some(node)
} }
fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> { /// If `item` has an attribute macro attached to it, expands it.
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
let src = self.wrap_node_infile(item.clone()); let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?; let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
Some(self.parse_or_expand(macro_call_id.as_file())) Some(self.parse_or_expand(macro_call_id.as_file()))
} }
fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> { pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
let src = self.wrap_node_infile(attr.clone()); let src = self.wrap_node_infile(attr.clone());
let call_id = self.with_ctx(|ctx| { let call_id = self.with_ctx(|ctx| {
@ -559,7 +275,7 @@ impl<'db> SemanticsImpl<'db> {
Some(self.parse_or_expand(call_id.as_file())) Some(self.parse_or_expand(call_id.as_file()))
} }
fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> { pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
let calls = self.derive_macro_calls(attr)?; let calls = self.derive_macro_calls(attr)?;
self.with_ctx(|ctx| { self.with_ctx(|ctx| {
Some( Some(
@ -573,7 +289,7 @@ impl<'db> SemanticsImpl<'db> {
}) })
} }
fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> { pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
let res: Vec<_> = self let res: Vec<_> = self
.derive_macro_calls(attr)? .derive_macro_calls(attr)?
.into_iter() .into_iter()
@ -598,19 +314,21 @@ impl<'db> SemanticsImpl<'db> {
}) })
} }
fn is_derive_annotated(&self, adt: &ast::Adt) -> bool { pub fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
let file_id = self.find_file(adt.syntax()).file_id; let file_id = self.find_file(adt.syntax()).file_id;
let adt = InFile::new(file_id, adt); let adt = InFile::new(file_id, adt);
self.with_ctx(|ctx| ctx.has_derives(adt)) self.with_ctx(|ctx| ctx.has_derives(adt))
} }
fn is_attr_macro_call(&self, item: &ast::Item) -> bool { pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
let file_id = self.find_file(item.syntax()).file_id; let file_id = self.find_file(item.syntax()).file_id;
let src = InFile::new(file_id, item.clone()); let src = InFile::new(file_id, item.clone());
self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some()) self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
} }
fn speculative_expand( /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
/// expansion. `token_to_map` should be a token from the `speculative args` node.
pub fn speculative_expand(
&self, &self,
actual_macro_call: &ast::MacroCall, actual_macro_call: &ast::MacroCall,
speculative_args: &ast::TokenTree, speculative_args: &ast::TokenTree,
@ -633,7 +351,9 @@ impl<'db> SemanticsImpl<'db> {
) )
} }
fn speculative_expand_attr( /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
/// expansion. `token_to_map` should be a token from the `speculative args` node.
pub fn speculative_expand_attr_macro(
&self, &self,
actual_macro_call: &ast::Item, actual_macro_call: &ast::Item,
speculative_args: &ast::Item, speculative_args: &ast::Item,
@ -649,7 +369,7 @@ impl<'db> SemanticsImpl<'db> {
) )
} }
fn speculative_expand_derive_as_pseudo_attr_macro( pub fn speculative_expand_derive_as_pseudo_attr_macro(
&self, &self,
actual_macro_call: &ast::Attr, actual_macro_call: &ast::Attr,
speculative_args: &ast::Attr, speculative_args: &ast::Attr,
@ -668,8 +388,9 @@ impl<'db> SemanticsImpl<'db> {
) )
} }
/// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
// This might not be the correct way to do this, but it works for now // This might not be the correct way to do this, but it works for now
fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
let mut res = smallvec![]; let mut res = smallvec![];
let tokens = (|| { let tokens = (|| {
let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?; let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
@ -723,7 +444,10 @@ impl<'db> SemanticsImpl<'db> {
res res
} }
fn descend_into_macros( /// Descend the token into its macro call if it is part of one, returning the tokens in the
/// expansion that it is associated with. If `offset` points into the token's range, it will
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros(
&self, &self,
token: SyntaxToken, token: SyntaxToken,
offset: TextSize, offset: TextSize,
@ -736,7 +460,10 @@ impl<'db> SemanticsImpl<'db> {
res res
} }
fn descend_into_macros_with_same_text( /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
///
/// Returns the original non descended token if none of the mapped counterparts have the same text.
pub fn descend_into_macros_with_same_text(
&self, &self,
token: SyntaxToken, token: SyntaxToken,
offset: TextSize, offset: TextSize,
@ -755,7 +482,7 @@ impl<'db> SemanticsImpl<'db> {
res res
} }
fn descend_into_macros_with_kind_preference( pub fn descend_into_macros_with_kind_preference(
&self, &self,
token: SyntaxToken, token: SyntaxToken,
offset: TextSize, offset: TextSize,
@ -785,7 +512,10 @@ impl<'db> SemanticsImpl<'db> {
res.unwrap_or(token) res.unwrap_or(token)
} }
fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken { /// Descend the token into its macro call if it is part of one, returning the token in the
/// expansion that it is associated with. If `offset` points into the token's range, it will
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
let mut res = token.clone(); let mut res = token.clone();
self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| { self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
res = value; res = value;
@ -995,17 +725,23 @@ impl<'db> SemanticsImpl<'db> {
}) })
} }
fn original_range(&self, node: &SyntaxNode) -> FileRange { /// Attempts to map the node out of macro expanded files returning the original file range.
/// If upmapping is not possible, this will fall back to the range of the macro call of the
/// macro file the node resides in.
pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
let node = self.find_file(node); let node = self.find_file(node);
node.original_file_range(self.db.upcast()) node.original_file_range(self.db.upcast())
} }
fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> { /// Attempts to map the node out of macro expanded files returning the original file range.
pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
let node = self.find_file(node); let node = self.find_file(node);
node.original_file_range_opt(self.db.upcast()) node.original_file_range_opt(self.db.upcast())
} }
fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> { /// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map( self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
|InFile { file_id, value }| { |InFile { file_id, value }| {
self.cache(find_root(value.syntax()), file_id); self.cache(find_root(value.syntax()), file_id);
@ -1014,7 +750,9 @@ impl<'db> SemanticsImpl<'db> {
) )
} }
fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> { /// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
let InFile { file_id, .. } = self.find_file(node); let InFile { file_id, .. } = self.find_file(node);
InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map( InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
|InFile { file_id, value }| { |InFile { file_id, value }| {
@ -1024,7 +762,7 @@ impl<'db> SemanticsImpl<'db> {
) )
} }
fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange { pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
let root = self.parse_or_expand(src.file_id); let root = self.parse_or_expand(src.file_id);
let node = src.map(|it| it.to_node(&root)); let node = src.map(|it| it.to_node(&root));
node.as_ref().original_file_range(self.db.upcast()) node.as_ref().original_file_range(self.db.upcast())
@ -1037,7 +775,8 @@ impl<'db> SemanticsImpl<'db> {
token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent)) token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
} }
fn ancestors_with_macros( /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
pub fn ancestors_with_macros(
&self, &self,
node: SyntaxNode, node: SyntaxNode,
) -> impl Iterator<Item = SyntaxNode> + Clone + '_ { ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
@ -1055,7 +794,7 @@ impl<'db> SemanticsImpl<'db> {
.map(|it| it.value) .map(|it| it.value)
} }
fn ancestors_at_offset_with_macros( pub fn ancestors_at_offset_with_macros(
&self, &self,
node: &SyntaxNode, node: &SyntaxNode,
offset: TextSize, offset: TextSize,
@ -1065,7 +804,7 @@ impl<'db> SemanticsImpl<'db> {
.kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
} }
fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> { pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
let text = lifetime.text(); let text = lifetime.text();
let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| { let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?; let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
@ -1076,7 +815,7 @@ impl<'db> SemanticsImpl<'db> {
ToDef::to_def(self, src) ToDef::to_def(self, src)
} }
fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> { pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
let text = lifetime.text(); let text = lifetime.text();
let label = lifetime.syntax().ancestors().find_map(|syn| { let label = lifetime.syntax().ancestors().find_map(|syn| {
let label = match_ast! { let label = match_ast! {
@ -1098,7 +837,7 @@ impl<'db> SemanticsImpl<'db> {
ToDef::to_def(self, src) ToDef::to_def(self, src)
} }
fn resolve_type(&self, ty: &ast::Type) -> Option<Type> { pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
let analyze = self.analyze(ty.syntax())?; let analyze = self.analyze(ty.syntax())?;
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id); let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
let ty = hir_ty::TyLoweringContext::new( let ty = hir_ty::TyLoweringContext::new(
@ -1110,7 +849,7 @@ impl<'db> SemanticsImpl<'db> {
Some(Type::new_with_resolver(self.db, &analyze.resolver, ty)) Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
} }
fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> { pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
let analyze = self.analyze(path.syntax())?; let analyze = self.analyze(path.syntax())?;
let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id); let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene); let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
@ -1121,7 +860,7 @@ impl<'db> SemanticsImpl<'db> {
} }
} }
fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> { pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
let mutability = |m| match m { let mutability = |m| match m {
hir_ty::Mutability::Not => Mutability::Shared, hir_ty::Mutability::Not => Mutability::Shared,
hir_ty::Mutability::Mut => Mutability::Mut, hir_ty::Mutability::Mut => Mutability::Mut,
@ -1165,33 +904,36 @@ impl<'db> SemanticsImpl<'db> {
}) })
} }
fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> { pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
self.analyze(expr.syntax())? self.analyze(expr.syntax())?
.type_of_expr(self.db, expr) .type_of_expr(self.db, expr)
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced }) .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
} }
fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> { pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
self.analyze(pat.syntax())? self.analyze(pat.syntax())?
.type_of_pat(self.db, pat) .type_of_pat(self.db, pat)
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced }) .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
} }
fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> { /// It also includes the changes that binding mode makes in the type. For example in
/// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
/// of this function is `&mut Option<T>`
pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat) self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
} }
fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> { pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
self.analyze(param.syntax())?.type_of_self(self.db, param) self.analyze(param.syntax())?.type_of_self(self.db, param)
} }
fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> { pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
self.analyze(pat.syntax()) self.analyze(pat.syntax())
.and_then(|it| it.pattern_adjustments(self.db, pat)) .and_then(|it| it.pattern_adjustments(self.db, pat))
.unwrap_or_default() .unwrap_or_default()
} }
fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> { pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat) self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
} }
@ -1226,32 +968,32 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr) self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
} }
fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> { pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call) self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
} }
fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> { pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
self.analyze(field.syntax())?.resolve_field(self.db, field) self.analyze(field.syntax())?.resolve_field(self.db, field)
} }
fn resolve_record_field( pub fn resolve_record_field(
&self, &self,
field: &ast::RecordExprField, field: &ast::RecordExprField,
) -> Option<(Field, Option<Local>, Type)> { ) -> Option<(Field, Option<Local>, Type)> {
self.analyze(field.syntax())?.resolve_record_field(self.db, field) self.analyze(field.syntax())?.resolve_record_field(self.db, field)
} }
fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> { pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field) self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
} }
fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> { pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
let sa = self.analyze(macro_call.syntax())?; let sa = self.analyze(macro_call.syntax())?;
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call); let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
sa.resolve_macro_call(self.db, macro_call) sa.resolve_macro_call(self.db, macro_call)
} }
fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool { pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
let sa = match self.analyze(macro_call.syntax()) { let sa = match self.analyze(macro_call.syntax()) {
Some(it) => it, Some(it) => it,
None => return false, None => return false,
@ -1260,7 +1002,7 @@ impl<'db> SemanticsImpl<'db> {
sa.is_unsafe_macro_call(self.db, macro_call) sa.is_unsafe_macro_call(self.db, macro_call)
} }
fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> { pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
let item_in_file = self.wrap_node_infile(item.clone()); let item_in_file = self.wrap_node_infile(item.clone());
let id = self.with_ctx(|ctx| { let id = self.with_ctx(|ctx| {
let macro_call_id = ctx.item_to_macro_call(item_in_file)?; let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
@ -1269,7 +1011,7 @@ impl<'db> SemanticsImpl<'db> {
Some(Macro { id }) Some(Macro { id })
} }
fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> { pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
self.analyze(path.syntax())?.resolve_path(self.db, path) self.analyze(path.syntax())?.resolve_path(self.db, path)
} }
@ -1277,17 +1019,17 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit) self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
} }
fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat) self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
} }
fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
self.analyze(literal.syntax()) self.analyze(literal.syntax())
.and_then(|it| it.record_literal_missing_fields(self.db, literal)) .and_then(|it| it.record_literal_missing_fields(self.db, literal))
.unwrap_or_default() .unwrap_or_default()
} }
fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> { pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
self.analyze(pattern.syntax()) self.analyze(pattern.syntax())
.and_then(|it| it.record_pattern_missing_fields(self.db, pattern)) .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
.unwrap_or_default() .unwrap_or_default()
@ -1299,7 +1041,7 @@ impl<'db> SemanticsImpl<'db> {
f(&mut ctx) f(&mut ctx)
} }
fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> { pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
let src = self.find_file(src.syntax()).with_value(src).cloned(); let src = self.find_file(src.syntax()).with_value(src).cloned();
T::to_def(self, src) T::to_def(self, src)
} }
@ -1308,7 +1050,7 @@ impl<'db> SemanticsImpl<'db> {
self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from) self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
} }
fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> { pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope { self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
db: self.db, db: self.db,
file_id, file_id,
@ -1316,7 +1058,11 @@ impl<'db> SemanticsImpl<'db> {
}) })
} }
fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> Option<SemanticsScope<'db>> { pub fn scope_at_offset(
&self,
node: &SyntaxNode,
offset: TextSize,
) -> Option<SemanticsScope<'db>> {
self.analyze_with_offset_no_infer(node, offset).map( self.analyze_with_offset_no_infer(node, offset).map(
|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope { |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
db: self.db, db: self.db,
@ -1326,7 +1072,8 @@ impl<'db> SemanticsImpl<'db> {
) )
} }
fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>> /// Search for a definition's source and cache its syntax tree
pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
where where
Def::Ast: AstNode, Def::Ast: AstNode,
{ {
@ -1391,7 +1138,7 @@ impl<'db> SemanticsImpl<'db> {
assert!(prev == None || prev == Some(file_id)) assert!(prev == None || prev == Some(file_id))
} }
fn assert_contains_node(&self, node: &SyntaxNode) { pub fn assert_contains_node(&self, node: &SyntaxNode) {
self.find_file(node); self.find_file(node);
} }
@ -1427,7 +1174,7 @@ impl<'db> SemanticsImpl<'db> {
InFile::new(file_id, node) InFile::new(file_id, node)
} }
fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool { pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
method_call_expr method_call_expr
.receiver() .receiver()
.and_then(|expr| { .and_then(|expr| {
@ -1450,7 +1197,7 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or(false) .unwrap_or(false)
} }
fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool { pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
ref_expr ref_expr
.expr() .expr()
.and_then(|expr| { .and_then(|expr| {
@ -1469,7 +1216,7 @@ impl<'db> SemanticsImpl<'db> {
// more than it should with the current implementation. // more than it should with the current implementation.
} }
fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool { pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
if ident_pat.ref_token().is_none() { if ident_pat.ref_token().is_none() {
return false; return false;
} }
@ -1512,7 +1259,8 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or(false) .unwrap_or(false)
} }
fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool { /// Returns `true` if the `node` is inside an `unsafe` context.
pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
let Some(enclosing_item) = let Some(enclosing_item) =
expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast) expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
else { else {

View file

@ -0,0 +1,159 @@
use crate::assist_context::{AssistContext, Assists};
use ide_db::{
assists::{AssistId, AssistKind},
defs::Definition,
LineIndexDatabase,
};
use syntax::{
ast::{self, edit_in_place::Indent},
AstNode,
};
// Assist: bind_unused_param
//
// Binds unused function parameter to an underscore.
//
// ```
// fn some_function(x: i32$0) {}
// ```
// ->
// ```
// fn some_function(x: i32) {
// let _ = x;
// }
// ```
pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    // Only applicable when the cursor is on a parameter bound by a plain
    // identifier pattern (`x: i32`), not e.g. a tuple pattern.
    let param: ast::Param = ctx.find_node_at_offset()?;
    let Some(ast::Pat::IdentPat(ident_pat)) = param.pat() else { return None };

    let param_def = {
        let local = ctx.sema.to_def(&ident_pat)?;
        Definition::Local(local)
    };
    // A parameter with at least one usage must not be rebound.
    if param_def.usages(&ctx.sema).at_least_one() {
        cov_mark::hit!(keep_used);
        return None;
    }

    let func = param.syntax().ancestors().find_map(ast::Fn::cast)?;
    let stmt_list = func.body()?.stmt_list()?;
    let l_curly_range = stmt_list.l_curly_token()?.text_range();
    let r_curly_range = stmt_list.r_curly_token()?.text_range();

    acc.add(
        AssistId("bind_unused_param", AssistKind::QuickFix),
        // `format!` already yields an owned `String`; passing it by value
        // avoids the extra clone a `&String -> String` conversion would do.
        format!("Bind as `let _ = {};`", &ident_pat),
        param.syntax().text_range(),
        |builder| {
            let line_index = ctx.db().line_index(ctx.file_id());

            let indent = func.indent_level();
            let text_indent = indent + 1;
            let mut text = format!("\n{text_indent}let _ = {ident_pat};");

            // When `{` and `}` share a line (empty one-line body), also push
            // the closing brace onto its own, correctly indented line.
            let left_line = line_index.line_col(l_curly_range.end()).line;
            let right_line = line_index.line_col(r_curly_range.start()).line;
            if left_line == right_line {
                cov_mark::hit!(single_line);
                text.push_str(&format!("\n{indent}"));
            }

            builder.insert(l_curly_range.end(), text);
        },
    )
}
#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_not_applicable};

    use super::*;

    // A `{}` body: the binding is inserted and the `}` is pushed onto a new
    // line (asserted via the `single_line` coverage mark).
    #[test]
    fn bind_unused_empty_block() {
        cov_mark::check!(single_line);
        check_assist(
            bind_unused_param,
            r#"
fn foo($0y: i32) {}
"#,
            r#"
fn foo(y: i32) {
    let _ = y;
}
"#,
        );
    }

    // Body already spans two lines, so only the binding line is inserted.
    #[test]
    fn bind_unused_empty_block_with_newline() {
        check_assist(
            bind_unused_param,
            r#"
fn foo($0y: i32) {
}
"#,
            r#"
fn foo(y: i32) {
    let _ = y;
}
"#,
        );
    }

    // Generic params and a `where` clause between signature and body do not
    // confuse the insertion point (it keys off the `{` token).
    #[test]
    fn bind_unused_generic() {
        check_assist(
            bind_unused_param,
            r#"
fn foo<T>($0y: T)
where T : Default {
}
"#,
            r#"
fn foo<T>(y: T)
where T : Default {
    let _ = y;
}
"#,
        );
    }

    // Works in an impl: indentation of the inserted binding follows the
    // nested function's indent level.
    #[test]
    fn trait_impl() {
        check_assist(
            bind_unused_param,
            r#"
trait Trait {
    fn foo(x: i32);
}
impl Trait for () {
    fn foo($0x: i32) {}
}
"#,
            r#"
trait Trait {
    fn foo(x: i32);
}
impl Trait for () {
    fn foo(x: i32) {
        let _ = x;
    }
}
"#,
        );
    }

    // A used parameter must not be rebound (assist not applicable).
    #[test]
    fn keep_used() {
        cov_mark::check!(keep_used);
        check_assist_not_applicable(
            bind_unused_param,
            r#"
fn foo(x: i32, $0y: i32) { y; }
"#,
        );
    }
}

View file

@ -103,7 +103,6 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
cond, cond,
ast::Expr::BinExpr(_) ast::Expr::BinExpr(_)
| ast::Expr::BlockExpr(_) | ast::Expr::BlockExpr(_)
| ast::Expr::BoxExpr(_)
| ast::Expr::BreakExpr(_) | ast::Expr::BreakExpr(_)
| ast::Expr::CastExpr(_) | ast::Expr::CastExpr(_)
| ast::Expr::ClosureExpr(_) | ast::Expr::ClosureExpr(_)

View file

@ -15,26 +15,13 @@ use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange};
// Move an expression out of a format string. // Move an expression out of a format string.
// //
// ``` // ```
// macro_rules! format_args { // # //- minicore: fmt
// ($lit:literal $(tt:tt)*) => { 0 },
// }
// macro_rules! print {
// ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
// }
//
// fn main() { // fn main() {
// print!("{var} {x + 1}$0"); // print!("{var} {x + 1}$0");
// } // }
// ``` // ```
// -> // ->
// ``` // ```
// macro_rules! format_args {
// ($lit:literal $(tt:tt)*) => { 0 },
// }
// macro_rules! print {
// ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
// }
//
// fn main() { // fn main() {
// print!("{var} {}"$0, x + 1); // print!("{var} {}"$0, x + 1);
// } // }
@ -158,37 +145,21 @@ mod tests {
use super::*; use super::*;
use crate::tests::check_assist; use crate::tests::check_assist;
const MACRO_DECL: &'static str = r#"
macro_rules! format_args {
($lit:literal $(tt:tt)*) => { 0 },
}
macro_rules! print {
($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
}
"#;
fn add_macro_decl(s: &'static str) -> String {
MACRO_DECL.to_string() + s
}
#[test] #[test]
fn multiple_middle_arg() { fn multiple_middle_arg() {
check_assist( check_assist(
extract_expressions_from_format_string, extract_expressions_from_format_string,
&add_macro_decl(
r#" r#"
//- minicore: fmt
fn main() { fn main() {
print!("{} {x + 1:b} {}$0", y + 2, 2); print!("{} {x + 1:b} {}$0", y + 2, 2);
} }
"#, "#,
),
&add_macro_decl(
r#" r#"
fn main() { fn main() {
print!("{} {:b} {}"$0, y + 2, x + 1, 2); print!("{} {:b} {}"$0, y + 2, x + 1, 2);
} }
"#, "#,
),
); );
} }
@ -196,20 +167,17 @@ fn main() {
fn single_arg() { fn single_arg() {
check_assist( check_assist(
extract_expressions_from_format_string, extract_expressions_from_format_string,
&add_macro_decl(
r#" r#"
//- minicore: fmt
fn main() { fn main() {
print!("{obj.value:b}$0",); print!("{obj.value:b}$0",);
} }
"#, "#,
),
&add_macro_decl(
r#" r#"
fn main() { fn main() {
print!("{:b}"$0, obj.value); print!("{:b}"$0, obj.value);
} }
"#, "#,
),
); );
} }
@ -217,20 +185,17 @@ fn main() {
fn multiple_middle_placeholders_arg() { fn multiple_middle_placeholders_arg() {
check_assist( check_assist(
extract_expressions_from_format_string, extract_expressions_from_format_string,
&add_macro_decl(
r#" r#"
//- minicore: fmt
fn main() { fn main() {
print!("{} {x + 1:b} {} {}$0", y + 2, 2); print!("{} {x + 1:b} {} {}$0", y + 2, 2);
} }
"#, "#,
),
&add_macro_decl(
r#" r#"
fn main() { fn main() {
print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, $1); print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, $1);
} }
"#, "#,
),
); );
} }
@ -238,20 +203,17 @@ fn main() {
fn multiple_trailing_args() { fn multiple_trailing_args() {
check_assist( check_assist(
extract_expressions_from_format_string, extract_expressions_from_format_string,
&add_macro_decl(
r#" r#"
//- minicore: fmt
fn main() { fn main() {
print!("{:b} {x + 1:b} {Struct(1, 2)}$0", 1); print!("{:b} {x + 1:b} {Struct(1, 2)}$0", 1);
} }
"#, "#,
),
&add_macro_decl(
r#" r#"
fn main() { fn main() {
print!("{:b} {:b} {}"$0, 1, x + 1, Struct(1, 2)); print!("{:b} {:b} {}"$0, 1, x + 1, Struct(1, 2));
} }
"#, "#,
),
); );
} }
@ -259,20 +221,17 @@ fn main() {
fn improper_commas() { fn improper_commas() {
check_assist( check_assist(
extract_expressions_from_format_string, extract_expressions_from_format_string,
&add_macro_decl(
r#" r#"
//- minicore: fmt
fn main() { fn main() {
print!("{} {x + 1:b} {Struct(1, 2)}$0", 1,); print!("{} {x + 1:b} {Struct(1, 2)}$0", 1,);
} }
"#, "#,
),
&add_macro_decl(
r#" r#"
fn main() { fn main() {
print!("{} {:b} {}"$0, 1, x + 1, Struct(1, 2)); print!("{} {:b} {}"$0, 1, x + 1, Struct(1, 2));
} }
"#, "#,
),
); );
} }
@ -280,20 +239,17 @@ fn main() {
fn nested_tt() { fn nested_tt() {
check_assist( check_assist(
extract_expressions_from_format_string, extract_expressions_from_format_string,
&add_macro_decl(
r#" r#"
//- minicore: fmt
fn main() { fn main() {
print!("My name is {} {x$0 + x}", stringify!(Paperino)) print!("My name is {} {x$0 + x}", stringify!(Paperino))
} }
"#, "#,
),
&add_macro_decl(
r#" r#"
fn main() { fn main() {
print!("My name is {} {}"$0, stringify!(Paperino), x + x) print!("My name is {} {}"$0, stringify!(Paperino), x + x)
} }
"#, "#,
),
); );
} }
@ -301,22 +257,19 @@ fn main() {
fn extract_only_expressions() { fn extract_only_expressions() {
check_assist( check_assist(
extract_expressions_from_format_string, extract_expressions_from_format_string,
&add_macro_decl(
r#" r#"
//- minicore: fmt
fn main() { fn main() {
let var = 1 + 1; let var = 1 + 1;
print!("foobar {var} {var:?} {x$0 + x}") print!("foobar {var} {var:?} {x$0 + x}")
} }
"#, "#,
),
&add_macro_decl(
r#" r#"
fn main() { fn main() {
let var = 1 + 1; let var = 1 + 1;
print!("foobar {var} {var:?} {}"$0, x + x) print!("foobar {var} {var:?} {}"$0, x + x)
} }
"#, "#,
),
); );
} }
} }

View file

@ -531,7 +531,7 @@ impl FunctionBody {
fn extracted_from_trait_impl(&self) -> bool { fn extracted_from_trait_impl(&self) -> bool {
match self.node().ancestors().find_map(ast::Impl::cast) { match self.node().ancestors().find_map(ast::Impl::cast) {
Some(c) => return c.trait_().is_some(), Some(c) => c.trait_().is_some(),
None => false, None => false,
} }
} }
@ -1048,13 +1048,9 @@ impl GenericParent {
fn generic_parents(parent: &SyntaxNode) -> Vec<GenericParent> { fn generic_parents(parent: &SyntaxNode) -> Vec<GenericParent> {
let mut list = Vec::new(); let mut list = Vec::new();
if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast) { if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast) {
match parent_item { if let ast::Item::Fn(ref fn_) = parent_item {
ast::Item::Fn(ref fn_) => { if let Some(parent_parent) =
if let Some(parent_parent) = parent_item parent_item.syntax().parent().and_then(|it| it.parent()).and_then(ast::Item::cast)
.syntax()
.parent()
.and_then(|it| it.parent())
.and_then(ast::Item::cast)
{ {
match parent_parent { match parent_parent {
ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)), ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)),
@ -1064,8 +1060,6 @@ fn generic_parents(parent: &SyntaxNode) -> Vec<GenericParent> {
} }
list.push(GenericParent::Fn(fn_.clone())); list.push(GenericParent::Fn(fn_.clone()));
} }
_ => (),
}
} }
list list
} }
@ -1728,7 +1722,7 @@ fn make_body(
let block = match &fun.body { let block = match &fun.body {
FunctionBody::Expr(expr) => { FunctionBody::Expr(expr) => {
let expr = rewrite_body_segment(ctx, &fun.params, &handler, expr.syntax()); let expr = rewrite_body_segment(ctx, &fun.params, &handler, expr.syntax());
let expr = ast::Expr::cast(expr).unwrap(); let expr = ast::Expr::cast(expr).expect("Body segment should be an expr");
match expr { match expr {
ast::Expr::BlockExpr(block) => { ast::Expr::BlockExpr(block) => {
// If the extracted expression is itself a block, there is no need to wrap it inside another block. // If the extracted expression is itself a block, there is no need to wrap it inside another block.
@ -1868,9 +1862,8 @@ fn with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr
if let Some(stmt_list) = block.stmt_list() { if let Some(stmt_list) = block.stmt_list() {
stmt_list.syntax().children_with_tokens().for_each(|node_or_token| { stmt_list.syntax().children_with_tokens().for_each(|node_or_token| {
match &node_or_token { if let syntax::NodeOrToken::Token(_) = &node_or_token {
syntax::NodeOrToken::Token(_) => elements.push(node_or_token), elements.push(node_or_token)
_ => (),
}; };
}); });
} }
@ -1934,12 +1927,18 @@ fn fix_param_usages(ctx: &AssistContext<'_>, params: &[Param], syntax: &SyntaxNo
Some(ast::Expr::RefExpr(node)) Some(ast::Expr::RefExpr(node))
if param.kind() == ParamKind::MutRef && node.mut_token().is_some() => if param.kind() == ParamKind::MutRef && node.mut_token().is_some() =>
{ {
ted::replace(node.syntax(), node.expr().unwrap().syntax()); ted::replace(
node.syntax(),
node.expr().expect("RefExpr::expr() cannot be None").syntax(),
);
} }
Some(ast::Expr::RefExpr(node)) Some(ast::Expr::RefExpr(node))
if param.kind() == ParamKind::SharedRef && node.mut_token().is_none() => if param.kind() == ParamKind::SharedRef && node.mut_token().is_none() =>
{ {
ted::replace(node.syntax(), node.expr().unwrap().syntax()); ted::replace(
node.syntax(),
node.expr().expect("RefExpr::expr() cannot be None").syntax(),
);
} }
Some(_) | None => { Some(_) | None => {
let p = &make::expr_prefix(T![*], usage.clone()).clone_for_update(); let p = &make::expr_prefix(T![*], usage.clone()).clone_for_update();

View file

@ -0,0 +1,205 @@
use hir::{AsAssocItem, HirDisplay};
use ide_db::{
assists::{AssistId, AssistKind},
famous_defs::FamousDefs,
};
use syntax::{ast, AstNode};
use crate::assist_context::{AssistContext, Assists};
// Assist: into_to_qualified_from
//
// Convert an `into` method call to a fully qualified `from` call.
//
// ```
// //- minicore: from
// struct B;
// impl From<i32> for B {
// fn from(a: i32) -> Self {
// B
// }
// }
//
// fn main() -> () {
// let a = 3;
// let b: B = a.in$0to();
// }
// ```
// ->
// ```
// struct B;
// impl From<i32> for B {
// fn from(a: i32) -> Self {
// B
// }
// }
//
// fn main() -> () {
// let a = 3;
// let b: B = B::from(a);
// }
// ```
pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    // The assist is anchored on a method call: `receiver.in$0to()`.
    let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
    let name_ref = call.name_ref()?;
    let receiver = call.receiver()?;
    let db = ctx.db();
    let sema = &ctx.sema;
    let resolved_fn = sema.resolve_method_call(&call)?;
    let scope = sema.scope(call.syntax())?;

    // Bail out unless the resolved method comes from `core::convert::Into`.
    let into_trait = FamousDefs(sema, scope.krate()).core_convert_Into()?;
    if resolved_fn.as_assoc_item(db)?.containing_trait_impl(db)? != into_trait {
        return Some(());
    }

    // Render the (adjusted) target type as it would be written at this
    // location, e.g. `B` or `C::B` depending on what is in scope.
    let target_ty = sema.type_of_expr(&call.clone().into())?;
    let target_ty_text =
        target_ty.adjusted().display_source_code(db, scope.module().into(), true).ok()?;

    acc.add(
        AssistId("into_to_qualified_from", AssistKind::Generate),
        "Convert `into` to fully qualified `from`",
        name_ref.syntax().text_range(),
        |edit| {
            // Replace the whole call, e.g. `a.into()` -> `B::from(a)`.
            edit.replace(
                call.syntax().text_range(),
                format!("{target_ty_text}::from({receiver})"),
            );
        },
    );

    Some(())
}
#[cfg(test)]
mod tests {
    use crate::tests::check_assist;

    use super::into_to_qualified_from;

    // Both types and the `From` impl live in the same module, so the short
    // type name is enough to qualify the call.
    #[test]
    fn two_types_in_same_mod() {
        check_assist(
            into_to_qualified_from,
            r#"
//- minicore: from
struct A;
struct B;
impl From<A> for B {
    fn from(a: A) -> Self {
        B
    }
}
fn main() -> () {
    let a: A = A;
    let b: B = a.in$0to();
}"#,
            r#"
struct A;
struct B;
impl From<A> for B {
    fn from(a: A) -> Self {
        B
    }
}
fn main() -> () {
    let a: A = A;
    let b: B = B::from(a);
}"#,
        )
    }

    // The target type is defined in a child module but imported, so the
    // rendered path is still the short name `B`.
    #[test]
    fn fromed_in_child_mod_imported() {
        check_assist(
            into_to_qualified_from,
            r#"
//- minicore: from
use C::B;
struct A;
mod C {
    use crate::A;
    pub(super) struct B;
    impl From<A> for B {
        fn from(a: A) -> Self {
            B
        }
    }
}
fn main() -> () {
    let a: A = A;
    let b: B = a.in$0to();
}"#,
            r#"
use C::B;
struct A;
mod C {
    use crate::A;
    pub(super) struct B;
    impl From<A> for B {
        fn from(a: A) -> Self {
            B
        }
    }
}
fn main() -> () {
    let a: A = A;
    let b: B = B::from(a);
}"#,
        )
    }

    // Without an import, the rendered path keeps the module qualifier
    // (`C::B::from`).
    #[test]
    fn fromed_in_child_mod_not_imported() {
        check_assist(
            into_to_qualified_from,
            r#"
//- minicore: from
struct A;
mod C {
    use crate::A;
    pub(super) struct B;
    impl From<A> for B {
        fn from(a: A) -> Self {
            B
        }
    }
}
fn main() -> () {
    let a: A = A;
    let b: C::B = a.in$0to();
}"#,
            r#"
struct A;
mod C {
    use crate::A;
    pub(super) struct B;
    impl From<A> for B {
        fn from(a: A) -> Self {
            B
        }
    }
}
fn main() -> () {
    let a: A = A;
    let b: C::B = C::B::from(a);
}"#,
        )
    }
}

View file

@ -76,12 +76,19 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
let name = to_upper_snake_case(&name.to_string()); let name = to_upper_snake_case(&name.to_string());
let usages = Definition::Local(local).usages(&ctx.sema).all(); let usages = Definition::Local(local).usages(&ctx.sema).all();
if let Some(usages) = usages.references.get(&ctx.file_id()) { if let Some(usages) = usages.references.get(&ctx.file_id()) {
let name = make::name_ref(&name); let name_ref = make::name_ref(&name);
for usage in usages { for usage in usages {
let Some(usage) = usage.name.as_name_ref().cloned() else { continue }; let Some(usage) = usage.name.as_name_ref().cloned() else { continue };
if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage) {
let record_field = edit.make_mut(record_field);
let name_expr =
make::expr_path(make::path_from_text(&name)).clone_for_update();
record_field.replace_expr(name_expr);
} else {
let usage = edit.make_mut(usage); let usage = edit.make_mut(usage);
ted::replace(usage.syntax(), name.clone_for_update().syntax()); ted::replace(usage.syntax(), name_ref.clone_for_update().syntax());
}
} }
} }
@ -120,8 +127,7 @@ fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bool {
is_const &= is_const &=
sema.resolve_method_call(&call).map(|it| it.is_const(sema.db)).unwrap_or(true) sema.resolve_method_call(&call).map(|it| it.is_const(sema.db)).unwrap_or(true)
} }
ast::Expr::BoxExpr(_) ast::Expr::ForExpr(_)
| ast::Expr::ForExpr(_)
| ast::Expr::ReturnExpr(_) | ast::Expr::ReturnExpr(_)
| ast::Expr::TryExpr(_) | ast::Expr::TryExpr(_)
| ast::Expr::YieldExpr(_) | ast::Expr::YieldExpr(_)
@ -179,6 +185,33 @@ fn foo() {
); );
} }
#[test]
fn usage_in_field_shorthand() {
check_assist(
promote_local_to_const,
r"
struct Foo {
bar: usize,
}
fn main() {
let $0bar = 0;
let foo = Foo { bar };
}
",
r"
struct Foo {
bar: usize,
}
fn main() {
const $0BAR: usize = 0;
let foo = Foo { bar: BAR };
}
",
)
}
#[test] #[test]
fn not_applicable_non_const_meth_call() { fn not_applicable_non_const_meth_call() {
cov_mark::check!(promote_local_non_const); cov_mark::check!(promote_local_non_const);

View file

@ -113,10 +113,7 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
Some(parent) => match (expr, parent) { Some(parent) => match (expr, parent) {
(ast::Expr::CastExpr(_), ast::Expr::CastExpr(_)) => false, (ast::Expr::CastExpr(_), ast::Expr::CastExpr(_)) => false,
( (
ast::Expr::BoxExpr(_) ast::Expr::PrefixExpr(_) | ast::Expr::RefExpr(_) | ast::Expr::MacroExpr(_),
| ast::Expr::PrefixExpr(_)
| ast::Expr::RefExpr(_)
| ast::Expr::MacroExpr(_),
ast::Expr::AwaitExpr(_) ast::Expr::AwaitExpr(_)
| ast::Expr::CallExpr(_) | ast::Expr::CallExpr(_)
| ast::Expr::CastExpr(_) | ast::Expr::CastExpr(_)

View file

@ -48,6 +48,11 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<
return None; return None;
} }
let new_result_ty =
make::ext::ty_result(type_ref.clone(), make::ty_placeholder()).clone_for_update();
let generic_args = new_result_ty.syntax().descendants().find_map(ast::GenericArgList::cast)?;
let last_genarg = generic_args.generic_args().last()?;
acc.add( acc.add(
AssistId("wrap_return_type_in_result", AssistKind::RefactorRewrite), AssistId("wrap_return_type_in_result", AssistKind::RefactorRewrite),
"Wrap return type in Result", "Wrap return type in Result",
@ -75,19 +80,12 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<
ted::replace(ret_expr_arg.syntax(), ok_wrapped.syntax()); ted::replace(ret_expr_arg.syntax(), ok_wrapped.syntax());
} }
let new_result_ty =
make::ext::ty_result(type_ref.clone(), make::ty_placeholder()).clone_for_update();
let old_result_ty = edit.make_mut(type_ref.clone()); let old_result_ty = edit.make_mut(type_ref.clone());
ted::replace(old_result_ty.syntax(), new_result_ty.syntax()); ted::replace(old_result_ty.syntax(), new_result_ty.syntax());
if let Some(cap) = ctx.config.snippet_cap { if let Some(cap) = ctx.config.snippet_cap {
let generic_args = new_result_ty edit.add_placeholder_snippet(cap, last_genarg);
.syntax()
.descendants()
.find_map(ast::GenericArgList::cast)
.unwrap();
edit.add_placeholder_snippet(cap, generic_args.generic_args().last().unwrap());
} }
}, },
) )

View file

@ -114,6 +114,7 @@ mod handlers {
mod add_turbo_fish; mod add_turbo_fish;
mod apply_demorgan; mod apply_demorgan;
mod auto_import; mod auto_import;
mod bind_unused_param;
mod change_visibility; mod change_visibility;
mod convert_bool_then; mod convert_bool_then;
mod convert_comment_block; mod convert_comment_block;
@ -211,6 +212,7 @@ mod handlers {
mod unwrap_result_return_type; mod unwrap_result_return_type;
mod unqualify_method_call; mod unqualify_method_call;
mod wrap_return_type_in_result; mod wrap_return_type_in_result;
mod into_to_qualified_from;
pub(crate) fn all() -> &'static [Handler] { pub(crate) fn all() -> &'static [Handler] {
&[ &[
@ -224,6 +226,7 @@ mod handlers {
add_turbo_fish::add_turbo_fish, add_turbo_fish::add_turbo_fish,
apply_demorgan::apply_demorgan, apply_demorgan::apply_demorgan,
auto_import::auto_import, auto_import::auto_import,
bind_unused_param::bind_unused_param,
change_visibility::change_visibility, change_visibility::change_visibility,
convert_bool_then::convert_bool_then_to_if, convert_bool_then::convert_bool_then_to_if,
convert_bool_then::convert_if_to_bool_then, convert_bool_then::convert_if_to_bool_then,
@ -274,6 +277,7 @@ mod handlers {
inline_local_variable::inline_local_variable, inline_local_variable::inline_local_variable,
inline_type_alias::inline_type_alias, inline_type_alias::inline_type_alias,
inline_type_alias::inline_type_alias_uses, inline_type_alias::inline_type_alias_uses,
into_to_qualified_from::into_to_qualified_from,
introduce_named_generic::introduce_named_generic, introduce_named_generic::introduce_named_generic,
introduce_named_lifetime::introduce_named_lifetime, introduce_named_lifetime::introduce_named_lifetime,
invert_if::invert_if, invert_if::invert_if,

View file

@ -265,6 +265,21 @@ pub mod std { pub mod collections { pub struct HashMap { } } }
) )
} }
#[test]
fn doctest_bind_unused_param() {
check_doc_test(
"bind_unused_param",
r#####"
fn some_function(x: i32$0) {}
"#####,
r#####"
fn some_function(x: i32) {
let _ = x;
}
"#####,
)
}
#[test] #[test]
fn doctest_change_visibility() { fn doctest_change_visibility() {
check_doc_test( check_doc_test(
@ -694,25 +709,12 @@ fn doctest_extract_expressions_from_format_string() {
check_doc_test( check_doc_test(
"extract_expressions_from_format_string", "extract_expressions_from_format_string",
r#####" r#####"
macro_rules! format_args { //- minicore: fmt
($lit:literal $(tt:tt)*) => { 0 },
}
macro_rules! print {
($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
}
fn main() { fn main() {
print!("{var} {x + 1}$0"); print!("{var} {x + 1}$0");
} }
"#####, "#####,
r#####" r#####"
macro_rules! format_args {
($lit:literal $(tt:tt)*) => { 0 },
}
macro_rules! print {
($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
}
fn main() { fn main() {
print!("{var} {}"$0, x + 1); print!("{var} {}"$0, x + 1);
} }
@ -1754,6 +1756,40 @@ fn foo() {
) )
} }
#[test]
fn doctest_into_to_qualified_from() {
check_doc_test(
"into_to_qualified_from",
r#####"
//- minicore: from
struct B;
impl From<i32> for B {
fn from(a: i32) -> Self {
B
}
}
fn main() -> () {
let a = 3;
let b: B = a.in$0to();
}
"#####,
r#####"
struct B;
impl From<i32> for B {
fn from(a: i32) -> Self {
B
}
}
fn main() -> () {
let a = 3;
let b: B = B::from(a);
}
"#####,
)
}
#[test] #[test]
fn doctest_introduce_named_generic() { fn doctest_introduce_named_generic() {
check_doc_test( check_doc_test(

View file

@ -103,7 +103,6 @@ pub(crate) fn for_variable(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>)
match expr { match expr {
ast::Expr::RefExpr(inner) => next_expr = inner.expr(), ast::Expr::RefExpr(inner) => next_expr = inner.expr(),
ast::Expr::BoxExpr(inner) => next_expr = inner.expr(),
ast::Expr::AwaitExpr(inner) => next_expr = inner.expr(), ast::Expr::AwaitExpr(inner) => next_expr = inner.expr(),
// ast::Expr::BlockExpr(block) => expr = block.tail_expr(), // ast::Expr::BlockExpr(block) => expr = block.tail_expr(),
ast::Expr::CastExpr(inner) => next_expr = inner.expr(), ast::Expr::CastExpr(inner) => next_expr = inner.expr(),

View file

@ -1,10 +1,8 @@
//! Completion for cfg //! Completion for cfg
use std::iter;
use ide_db::SymbolKind; use ide_db::SymbolKind;
use itertools::Itertools; use itertools::Itertools;
use syntax::SyntaxKind; use syntax::{algo, ast::Ident, AstToken, Direction, NodeOrToken, SyntaxKind};
use crate::{completions::Completions, context::CompletionContext, CompletionItem}; use crate::{completions::Completions, context::CompletionContext, CompletionItem};
@ -15,31 +13,44 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) {
acc.add(completion.build(ctx.db)); acc.add(completion.build(ctx.db));
}; };
let previous = iter::successors(ctx.original_token.prev_token(), |t| { // FIXME: Move this into context/analysis.rs
(matches!(t.kind(), SyntaxKind::EQ) || t.kind().is_trivia()) let previous = ctx
.then(|| t.prev_token()) .original_token
.flatten() .prev_token()
.and_then(|it| {
if matches!(it.kind(), SyntaxKind::EQ) {
Some(it.into())
} else {
algo::non_trivia_sibling(it.into(), Direction::Prev)
}
}) })
.find(|t| matches!(t.kind(), SyntaxKind::IDENT)); .filter(|t| matches!(t.kind(), SyntaxKind::EQ))
.and_then(|it| algo::non_trivia_sibling(it.prev_sibling_or_token()?, Direction::Prev))
match previous.as_ref().map(|p| p.text()) { .map(|it| match it {
Some("target_arch") => KNOWN_ARCH.iter().copied().for_each(add_completion), NodeOrToken::Node(_) => None,
Some("target_env") => KNOWN_ENV.iter().copied().for_each(add_completion), NodeOrToken::Token(t) => Ident::cast(t),
Some("target_os") => KNOWN_OS.iter().copied().for_each(add_completion), });
Some("target_vendor") => KNOWN_VENDOR.iter().copied().for_each(add_completion), match previous {
Some("target_endian") => ["little", "big"].into_iter().for_each(add_completion), Some(None) => (),
Some(name) => ctx.krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| { Some(Some(p)) => match p.text() {
"target_arch" => KNOWN_ARCH.iter().copied().for_each(add_completion),
"target_env" => KNOWN_ENV.iter().copied().for_each(add_completion),
"target_os" => KNOWN_OS.iter().copied().for_each(add_completion),
"target_vendor" => KNOWN_VENDOR.iter().copied().for_each(add_completion),
"target_endian" => ["little", "big"].into_iter().for_each(add_completion),
name => ctx.krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| {
let insert_text = format!(r#""{s}""#); let insert_text = format!(r#""{s}""#);
let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s); let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
item.insert_text(insert_text); item.insert_text(insert_text);
acc.add(item.build(ctx.db)); acc.add(item.build(ctx.db));
}), }),
},
None => ctx.krate.potential_cfg(ctx.db).get_cfg_keys().cloned().unique().for_each(|s| { None => ctx.krate.potential_cfg(ctx.db).get_cfg_keys().cloned().unique().for_each(|s| {
let item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s); let item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
acc.add(item.build(ctx.db)); acc.add(item.build(ctx.db));
}), }),
}; }
} }
const KNOWN_ARCH: [&str; 20] = [ const KNOWN_ARCH: [&str; 20] = [

View file

@ -1,6 +1,6 @@
//! Completion for derives //! Completion for derives
use hir::{HasAttrs, ScopeDef}; use hir::ScopeDef;
use ide_db::SymbolKind; use ide_db::{documentation::HasDocs, SymbolKind};
use itertools::Itertools; use itertools::Itertools;
use syntax::SmolStr; use syntax::SmolStr;

View file

@ -1,5 +1,5 @@
//! Completion for lints //! Completion for lints
use ide_db::{generated::lints::Lint, SymbolKind}; use ide_db::{documentation::Documentation, generated::lints::Lint, SymbolKind};
use syntax::ast; use syntax::ast;
use crate::{context::CompletionContext, item::CompletionItem, Completions}; use crate::{context::CompletionContext, item::CompletionItem, Completions};
@ -55,7 +55,7 @@ pub(super) fn complete_lint(
_ => name.to_owned(), _ => name.to_owned(),
}; };
let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label); let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label);
item.documentation(hir::Documentation::new(description.to_owned())); item.documentation(Documentation::new(description.to_owned()));
item.add_to(acc, ctx.db) item.add_to(acc, ctx.db)
} }
} }

View file

@ -1,7 +1,7 @@
//! Completion for extern crates //! Completion for extern crates
use hir::{HasAttrs, Name}; use hir::Name;
use ide_db::SymbolKind; use ide_db::{documentation::HasDocs, SymbolKind};
use crate::{context::CompletionContext, CompletionItem, CompletionItemKind}; use crate::{context::CompletionContext, CompletionItem, CompletionItemKind};

View file

@ -51,9 +51,7 @@ mod tests {
fn works_when_wrapped() { fn works_when_wrapped() {
check( check(
r#" r#"
macro_rules! format_args { //- minicore: fmt
($lit:literal $(tt:tt)*) => { 0 },
}
macro_rules! print { macro_rules! print {
($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
} }
@ -70,9 +68,7 @@ fn main() {
fn no_completion_without_brace() { fn no_completion_without_brace() {
check( check(
r#" r#"
macro_rules! format_args { //- minicore: fmt
($lit:literal $(tt:tt)*) => { 0 },
}
fn main() { fn main() {
let foobar = 1; let foobar = 1;
format_args!("f$0"); format_args!("f$0");
@ -87,18 +83,13 @@ fn main() {
check_edit( check_edit(
"foobar", "foobar",
r#" r#"
macro_rules! format_args { //- minicore: fmt
($lit:literal $(tt:tt)*) => { 0 },
}
fn main() { fn main() {
let foobar = 1; let foobar = 1;
format_args!("{f$0"); format_args!("{f$0");
} }
"#, "#,
r#" r#"
macro_rules! format_args {
($lit:literal $(tt:tt)*) => { 0 },
}
fn main() { fn main() {
let foobar = 1; let foobar = 1;
format_args!("{foobar"); format_args!("{foobar");
@ -108,18 +99,13 @@ fn main() {
check_edit( check_edit(
"foobar", "foobar",
r#" r#"
macro_rules! format_args { //- minicore: fmt
($lit:literal $(tt:tt)*) => { 0 },
}
fn main() { fn main() {
let foobar = 1; let foobar = 1;
format_args!("{$0"); format_args!("{$0");
} }
"#, "#,
r#" r#"
macro_rules! format_args {
($lit:literal $(tt:tt)*) => { 0 },
}
fn main() { fn main() {
let foobar = 1; let foobar = 1;
format_args!("{foobar"); format_args!("{foobar");

View file

@ -33,8 +33,8 @@
use hir::{self, HasAttrs}; use hir::{self, HasAttrs};
use ide_db::{ use ide_db::{
path_transform::PathTransform, syntax_helpers::insert_whitespace_into_node, documentation::HasDocs, path_transform::PathTransform,
traits::get_missing_assoc_items, SymbolKind, syntax_helpers::insert_whitespace_into_node, traits::get_missing_assoc_items, SymbolKind,
}; };
use syntax::{ use syntax::{
ast::{self, edit_in_place::AttrsOwnerEdit, HasTypeBounds}, ast::{self, edit_in_place::AttrsOwnerEdit, HasTypeBounds},

View file

@ -2,8 +2,12 @@
mod format_like; mod format_like;
use hir::{Documentation, HasAttrs}; use ide_db::{
use ide_db::{imports::insert_use::ImportScope, ty_filter::TryEnum, SnippetCap}; documentation::{Documentation, HasDocs},
imports::insert_use::ImportScope,
ty_filter::TryEnum,
SnippetCap,
};
use syntax::{ use syntax::{
ast::{self, make, AstNode, AstToken}, ast::{self, make, AstNode, AstToken},
SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR}, SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR},

View file

@ -1,7 +1,6 @@
//! This file provides snippet completions, like `pd` => `eprintln!(...)`. //! This file provides snippet completions, like `pd` => `eprintln!(...)`.
use hir::Documentation; use ide_db::{documentation::Documentation, imports::insert_use::ImportScope, SnippetCap};
use ide_db::{imports::insert_use::ImportScope, SnippetCap};
use crate::{ use crate::{
context::{ExprCtx, ItemListKind, PathCompletionCtx, Qualified}, context::{ExprCtx, ItemListKind, PathCompletionCtx, Qualified},

View file

@ -2,8 +2,11 @@
use std::fmt; use std::fmt;
use hir::{Documentation, Mutability}; use hir::Mutability;
use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind}; use ide_db::{
documentation::Documentation, imports::import_assets::LocatedImport, RootDatabase, SnippetCap,
SymbolKind,
};
use itertools::Itertools; use itertools::Itertools;
use smallvec::SmallVec; use smallvec::SmallVec;
use stdx::{impl_from, never}; use stdx::{impl_from, never};

View file

@ -12,7 +12,10 @@ pub(crate) mod literal;
use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef}; use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef};
use ide_db::{ use ide_db::{
helpers::item_name, imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind, documentation::{Documentation, HasDocs},
helpers::item_name,
imports::import_assets::LocatedImport,
RootDatabase, SnippetCap, SymbolKind,
}; };
use syntax::{AstNode, SmolStr, SyntaxKind, TextRange}; use syntax::{AstNode, SmolStr, SyntaxKind, TextRange};
@ -114,7 +117,7 @@ impl<'a> RenderContext<'a> {
} }
// FIXME: remove this // FIXME: remove this
fn docs(&self, def: impl HasAttrs) -> Option<hir::Documentation> { fn docs(&self, def: impl HasDocs) -> Option<Documentation> {
def.docs(self.db()) def.docs(self.db())
} }
} }
@ -409,7 +412,7 @@ fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind {
} }
} }
fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<hir::Documentation> { fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<Documentation> {
use hir::ModuleDef::*; use hir::ModuleDef::*;
match resolution { match resolution {
ScopeDef::ModuleDef(Module(it)) => it.docs(db), ScopeDef::ModuleDef(Module(it)) => it.docs(db),

View file

@ -1,7 +1,10 @@
//! Renderer for `enum` variants. //! Renderer for `enum` variants.
use hir::{db::HirDatabase, Documentation, HasAttrs, StructKind}; use hir::{db::HirDatabase, StructKind};
use ide_db::SymbolKind; use ide_db::{
documentation::{Documentation, HasDocs},
SymbolKind,
};
use crate::{ use crate::{
context::{CompletionContext, PathCompletionCtx, PathKind}, context::{CompletionContext, PathCompletionCtx, PathKind},

View file

@ -1,7 +1,7 @@
//! Renderer for macro invocations. //! Renderer for macro invocations.
use hir::{Documentation, HirDisplay}; use hir::HirDisplay;
use ide_db::SymbolKind; use ide_db::{documentation::Documentation, SymbolKind};
use syntax::SmolStr; use syntax::SmolStr;
use crate::{ use crate::{

View file

@ -1,7 +1,7 @@
//! Renderer for patterns. //! Renderer for patterns.
use hir::{db::HirDatabase, HasAttrs, Name, StructKind}; use hir::{db::HirDatabase, Name, StructKind};
use ide_db::SnippetCap; use ide_db::{documentation::HasDocs, SnippetCap};
use itertools::Itertools; use itertools::Itertools;
use syntax::SmolStr; use syntax::SmolStr;
@ -103,7 +103,7 @@ fn build_completion(
label: SmolStr, label: SmolStr,
lookup: SmolStr, lookup: SmolStr,
pat: String, pat: String,
def: impl HasAttrs + Copy, def: impl HasDocs + Copy,
adt_ty: hir::Type, adt_ty: hir::Type,
// Missing in context of match statement completions // Missing in context of match statement completions
is_variant_missing: bool, is_variant_missing: bool,

View file

@ -66,11 +66,6 @@ struct Foo;
) )
} }
#[test]
fn inside_nested_attr() {
check(r#"#[cfg($0)]"#, expect![[]])
}
#[test] #[test]
fn with_existing_attr() { fn with_existing_attr() {
check( check(
@ -635,6 +630,32 @@ struct Foo;
mod cfg { mod cfg {
use super::*; use super::*;
#[test]
fn inside_cfg() {
check(
r#"
//- /main.rs cfg:test,dbg=false,opt_level=2
#[cfg($0)]
"#,
expect![[r#"
ba dbg
ba opt_level
ba test
"#]],
);
check(
r#"
//- /main.rs cfg:test,dbg=false,opt_level=2
#[cfg(b$0)]
"#,
expect![[r#"
ba dbg
ba opt_level
ba test
"#]],
);
}
#[test] #[test]
fn cfg_target_endian() { fn cfg_target_endian() {
check( check(
@ -644,6 +665,13 @@ mod cfg {
ba little ba little
"#]], "#]],
); );
check(
r#"#[cfg(target_endian = b$0"#,
expect![[r#"
ba big
ba little
"#]],
);
} }
} }

View file

@ -0,0 +1,281 @@
//! Documentation attribute related utilities.
use either::Either;
use hir::{
db::{DefDatabase, HirDatabase},
resolve_doc_path_on, AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile,
};
use itertools::Itertools;
use syntax::{
ast::{self, IsString},
AstToken,
};
use text_edit::{TextRange, TextSize};
/// Plain-text documentation assembled from an item's doc attributes.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Documentation(String);

impl Documentation {
    /// Wraps an already-assembled documentation string.
    pub fn new(s: String) -> Self {
        Self(s)
    }

    /// Borrows the documentation text.
    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }
}

impl From<Documentation> for String {
    /// Unwraps the documentation into its underlying `String`.
    fn from(docs: Documentation) -> Self {
        docs.0
    }
}
/// Items that can carry documentation (doc comments / `#[doc]` attributes).
///
/// Requires [`HasAttrs`], since documentation is read from attributes.
pub trait HasDocs: HasAttrs {
    /// Returns the item's assembled documentation text, if any.
    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
    /// Resolves an intra-doc link (e.g. `[Foo]`) appearing in this item's docs
    /// to the definition it refers to, optionally restricted to a namespace.
    fn resolve_doc_path(
        self,
        db: &dyn HirDatabase,
        link: &str,
        ns: Option<hir::Namespace>,
    ) -> Option<hir::DocLinkDef>;
}
/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
#[derive(Debug)]
pub struct DocsRangeMap {
    // Resolves an `AttrId` back to its source attribute or doc comment
    // (used via `source_of_id` in `map`).
    source_map: AttrSourceMap,
    // (docstring-line-range, attr_index, attr-string-range)
    // a mapping from the text range of a line of the [`Documentation`] to the attribute index and
    // the original (untrimmed) syntax doc line
    mapping: Vec<(TextRange, AttrId, TextRange)>,
}
impl DocsRangeMap {
    /// Maps a [`TextRange`] relative to the documentation string back to its AST range
    pub fn map(&self, range: TextRange) -> Option<InFile<TextRange>> {
        // Binary-search the per-line mapping for the doc line containing `range`.
        let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
        let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
        // A range that straddles a line boundary cannot be mapped back.
        if !line_docs_range.contains_range(range) {
            return None;
        }

        // Offset of `range` within its documentation line.
        let relative_range = range - line_docs_range.start();

        let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
        match source {
            // `#[doc = "..."]` attribute: offset is measured from just after
            // the string literal's opening quote.
            Either::Left(attr) => {
                let string = get_doc_string_in_attr(attr)?;
                let text_range = string.open_quote_text_range()?;
                // Length is clamped so the result stays inside the literal.
                let range = TextRange::at(
                    text_range.end() + original_line_src_range.start() + relative_range.start(),
                    string.syntax().text_range().len().min(range.len()),
                );
                Some(InFile { file_id, value: range })
            }
            // Doc comment token: offset is measured past the comment prefix
            // (`///`, `//!`, ...).
            Either::Right(comment) => {
                let text_range = comment.syntax().text_range();
                let range = TextRange::at(
                    text_range.start()
                        + TextSize::try_from(comment.prefix().len()).ok()?
                        + original_line_src_range.start()
                        + relative_range.start(),
                    text_range.len().min(range.len()),
                );
                Some(InFile { file_id, value: range })
            }
        }
    }
}
/// Like [`docs_from_attrs`], but additionally records, for every line of the
/// assembled documentation, which attribute it came from and where in that
/// attribute's string the line sits — enabling [`DocsRangeMap::map`] to
/// translate ranges in the docs back to source ranges.
///
/// Returns `None` if the item has no documentation at all.
pub fn docs_with_rangemap(
    db: &dyn DefDatabase,
    attrs: &AttrsWithOwner,
) -> Option<(Documentation, DocsRangeMap)> {
    let docs =
        attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value().map(|s| (s, attr.id)));
    // Common leading whitespace stripped from every line.
    let indent = doc_indent(attrs);
    let mut buf = String::new();
    let mut mapping = Vec::new();
    for (doc, idx) in docs {
        if !doc.is_empty() {
            // Byte offset of the current raw line inside this attribute's string.
            let mut base_offset = 0;
            for raw_line in doc.split('\n') {
                let line = raw_line.trim_end();
                let line_len = line.len();
                // Strip the common indent (char-based, so multi-byte chars are safe);
                // lines shorter than the indent are kept as-is.
                let (offset, line) = match line.char_indices().nth(indent) {
                    Some((offset, _)) => (offset, &line[offset..]),
                    None => (0, line),
                };
                let buf_offset = buf.len();
                buf.push_str(line);
                // Record: (range of this line in `buf`, owning attribute,
                // range of the original untrimmed line in the attribute string).
                mapping.push((
                    TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
                    idx,
                    TextRange::at(
                        (base_offset + offset).try_into().ok()?,
                        line_len.try_into().ok()?,
                    ),
                ));
                buf.push('\n');
                base_offset += raw_line.len() + 1;
            }
        } else {
            // Empty doc attribute still contributes a blank line.
            buf.push('\n');
        }
    }
    // Drop the trailing newline pushed by the last iteration.
    buf.pop();
    if buf.is_empty() {
        None
    } else {
        Some((Documentation(buf), DocsRangeMap { mapping, source_map: attrs.source_map(db) }))
    }
}
/// Assembles the documentation text from an item's `doc` attributes
/// (doc comments are lowered to `#[doc = "..."]` attributes).
///
/// Each line has the common indentation (see [`doc_indent`]) and trailing
/// whitespace stripped; lines are joined with `\n`. Returns `None` when the
/// item has no documentation at all.
pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
    let docs = attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value());
    let indent = doc_indent(attrs);
    let mut buf = String::new();
    for doc in docs {
        // str::lines doesn't yield anything for the empty string
        if !doc.is_empty() {
            buf.extend(Itertools::intersperse(
                doc.lines().map(|line| {
                    // Skip the first `indent` chars (char-based, multi-byte safe);
                    // shorter lines are kept unchanged.
                    line.char_indices()
                        .nth(indent)
                        .map_or(line, |(offset, _)| &line[offset..])
                        .trim_end()
                }),
                "\n",
            ));
        }
        // Separate consecutive doc attributes with a newline.
        buf.push('\n');
    }
    // Drop the trailing newline pushed by the last iteration.
    buf.pop();
    if buf.is_empty() {
        None
    } else {
        Some(buf)
    }
}
/// Implements [`HasDocs`] for the listed `hir` item types by reading the
/// item's own attributes and resolving doc links directly on the item.
macro_rules! impl_has_docs {
    ($($def:ident,)*) => {$(
        impl HasDocs for hir::$def {
            fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
                docs_from_attrs(&self.attrs(db)).map(Documentation)
            }
            fn resolve_doc_path(
                self,
                db: &dyn HirDatabase,
                link: &str,
                ns: Option<hir::Namespace>
            ) -> Option<hir::DocLinkDef> {
                resolve_doc_path_on(db, self, link, ns)
            }
        }
    )*};
}

impl_has_docs![
    Variant, Field, Static, Const, Trait, TraitAlias, TypeAlias, Macro, Function, Adt, Module,
    Impl,
];
/// Implements [`HasDocs`] for variant types (e.g. `Struct`) by delegating to
/// the wrapping enum's impl (e.g. `Adt::Struct(self)`).
macro_rules! impl_has_docs_enum {
    ($($variant:ident),* for $enum:ident) => {$(
        impl HasDocs for hir::$variant {
            fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
                hir::$enum::$variant(self).docs(db)
            }
            fn resolve_doc_path(
                self,
                db: &dyn HirDatabase,
                link: &str,
                ns: Option<hir::Namespace>
            ) -> Option<hir::DocLinkDef> {
                hir::$enum::$variant(self).resolve_doc_path(db, link, ns)
            }
        }
    )*};
}

impl_has_docs_enum![Struct, Union, Enum for Adt];
impl HasDocs for hir::AssocItem {
    /// Delegates to the concrete associated-item kind.
    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
        match self {
            hir::AssocItem::Function(it) => it.docs(db),
            hir::AssocItem::Const(it) => it.docs(db),
            hir::AssocItem::TypeAlias(it) => it.docs(db),
        }
    }
    /// Delegates doc-link resolution to the concrete associated-item kind.
    fn resolve_doc_path(
        self,
        db: &dyn HirDatabase,
        link: &str,
        ns: Option<hir::Namespace>,
    ) -> Option<hir::DocLinkDef> {
        match self {
            hir::AssocItem::Function(it) => it.resolve_doc_path(db, link, ns),
            hir::AssocItem::Const(it) => it.resolve_doc_path(db, link, ns),
            hir::AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
        }
    }
}
impl HasDocs for hir::ExternCrateDecl {
    /// Docs for an `extern crate foo;` declaration: the declaration's own docs
    /// plus the docs on the resolved crate's root module, joined by a blank
    /// line when both are present.
    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
        // Use `and_then` rather than `?` on `resolved_crate`: if the crate
        // fails to resolve we must still surface the docs written on the
        // declaration itself (an early `?` would drop them and make the
        // `(Some(decl_docs), None)` arm below unreachable for that case).
        // Note: `docs_from_attrs` already yields `Option<String>`, so no
        // `String::from` conversion is needed.
        let crate_docs = self
            .resolved_crate(db)
            .and_then(|krate| docs_from_attrs(&krate.root_module().attrs(db)));
        let decl_docs = docs_from_attrs(&self.attrs(db));
        match (decl_docs, crate_docs) {
            (None, None) => None,
            (Some(decl_docs), None) => Some(decl_docs),
            (None, Some(crate_docs)) => Some(crate_docs),
            (Some(mut decl_docs), Some(crate_docs)) => {
                // Blank line between the declaration docs and the crate docs.
                decl_docs.push('\n');
                decl_docs.push('\n');
                decl_docs += &crate_docs;
                Some(decl_docs)
            }
        }
        .map(Documentation::new)
    }
    /// Doc links on the declaration resolve in the declaration's own scope.
    fn resolve_doc_path(
        self,
        db: &dyn HirDatabase,
        link: &str,
        ns: Option<hir::Namespace>,
    ) -> Option<hir::DocLinkDef> {
        resolve_doc_path_on(db, self, link, ns)
    }
}
/// Extracts the string literal from a `#[doc = "..."]` attribute, if any.
fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
    // `#[cfg_attr(..., doc = "", ...)]` has no expression and is not handled;
    // FIXME: See highlight injection for what to do here
    let expr = it.expr()?;
    // Only a plain string literal on the right-hand side counts: #[doc = lit]
    if let ast::Expr::Literal(lit) = expr {
        if let ast::LiteralKind::String(s) = lit.kind() {
            return Some(s);
        }
    }
    None
}
/// Smallest leading-whitespace width (in chars) across all non-blank lines of
/// the item's `doc` attributes; used to strip the common indentation when the
/// documentation text is assembled. Returns 0 when there are no such lines.
fn doc_indent(attrs: &hir::Attrs) -> usize {
    let mut min_indent: Option<usize> = None;
    for value in attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value()) {
        for line in value.lines() {
            // Blank (all-whitespace) lines don't constrain the indent.
            if line.chars().all(char::is_whitespace) {
                continue;
            }
            let indent = line.chars().take_while(|c| c.is_whitespace()).count();
            min_indent = Some(match min_indent {
                Some(current) => current.min(indent),
                None => indent,
            });
        }
    }
    min_indent.unwrap_or(0)
}

View file

@ -22,6 +22,7 @@ pub mod symbol_index;
pub mod traits; pub mod traits;
pub mod ty_filter; pub mod ty_filter;
pub mod use_trivial_constructor; pub mod use_trivial_constructor;
pub mod documentation;
pub mod imports { pub mod imports {
pub mod import_assets; pub mod import_assets;

View file

@ -71,12 +71,29 @@ impl Definition {
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
new_name: &str, new_name: &str,
) -> Result<SourceChange> { ) -> Result<SourceChange> {
// self.krate() returns None if
// self is a built-in attr, built-in type or tool module.
// it is not allowed for these defs to be renamed.
// cases where self.krate() is None is handled below.
if let Some(krate) = self.krate(sema.db) {
if !krate.origin(sema.db).is_local() {
bail!("Cannot rename a non-local definition.")
}
}
match *self { match *self {
Definition::Module(module) => rename_mod(sema, module, new_name), Definition::Module(module) => rename_mod(sema, module, new_name),
Definition::ToolModule(_) => {
bail!("Cannot rename a tool module")
}
Definition::BuiltinType(_) => { Definition::BuiltinType(_) => {
bail!("Cannot rename builtin type") bail!("Cannot rename builtin type")
} }
Definition::BuiltinAttr(_) => {
bail!("Cannot rename a builtin attr.")
}
Definition::SelfType(_) => bail!("Cannot rename `Self`"), Definition::SelfType(_) => bail!("Cannot rename `Self`"),
Definition::Macro(mac) => rename_reference(sema, Definition::Macro(mac), new_name),
def => rename_reference(sema, def, new_name), def => rename_reference(sema, def, new_name),
} }
} }

View file

@ -1,5 +1,7 @@
//! Rustdoc specific doc comment handling //! Rustdoc specific doc comment handling
use crate::documentation::Documentation;
// stripped down version of https://github.com/rust-lang/rust/blob/392ba2ba1a7d6c542d2459fb8133bebf62a4a423/src/librustdoc/html/markdown.rs#L810-L933 // stripped down version of https://github.com/rust-lang/rust/blob/392ba2ba1a7d6c542d2459fb8133bebf62a4a423/src/librustdoc/html/markdown.rs#L810-L933
pub fn is_rust_fence(s: &str) -> bool { pub fn is_rust_fence(s: &str) -> bool {
let mut seen_rust_tags = false; let mut seen_rust_tags = false;
@ -32,3 +34,170 @@ pub fn is_rust_fence(s: &str) -> bool {
!seen_other_tags || seen_rust_tags !seen_other_tags || seen_rust_tags
} }
// Fence markers rustdoc recognises as opening/closing a code block.
const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];

/// Reformats rustdoc markdown for display: rust code fences are normalised to
/// ```` ```rust ```` and lines rustdoc hides are dropped (see [`format_docs_`]).
pub fn format_docs(src: &Documentation) -> String {
    format_docs_(src.as_str())
}
/// Core of [`format_docs`]: processes `src` line by line, tracking whether we
/// are inside a code fence and whether that fence is rust code.
fn format_docs_(src: &str) -> String {
    let mut processed_lines = Vec::new();
    let mut in_code_block = false;
    let mut is_rust = false;

    for mut line in src.lines() {
        // Inside a rust block, drop the lines rustdoc hides (`#`-prefixed).
        if in_code_block && is_rust && code_line_ignored_by_rustdoc(line) {
            continue;
        }

        // A fence line toggles the in-block state; on an opening fence the
        // header (text after the fence) decides whether the block is rust.
        if let Some(header) = RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence))
        {
            in_code_block ^= true;

            if in_code_block {
                is_rust = is_rust_fence(header);

                if is_rust {
                    // Normalise aliases like ```ignore / ```rust,no_run.
                    line = "```rust";
                }
            }
        }

        if in_code_block {
            // `##` is rustdoc's escape for a literal leading `#`; unescape it.
            let trimmed = line.trim_start();
            if is_rust && trimmed.starts_with("##") {
                line = &trimmed[1..];
            }
        }

        processed_lines.push(line);
    }
    processed_lines.join("\n")
}
/// Returns `true` for code lines that rustdoc hides in rendered docs:
/// after trimming, a lone `#`, or `#` followed by a space or a tab.
fn code_line_ignored_by_rustdoc(line: &str) -> bool {
    match line.trim() {
        "#" => true,
        trimmed => trimmed.starts_with("# ") || trimmed.starts_with("#\t"),
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // --- fence normalisation: rust-ish fences become ```rust ---

    #[test]
    fn test_format_docs_adds_rust() {
        let comment = "```\nfn some_rust() {}\n```";
        assert_eq!(format_docs_(comment), "```rust\nfn some_rust() {}\n```");
    }

    #[test]
    fn test_format_docs_handles_plain_text() {
        let comment = "```text\nthis is plain text\n```";
        assert_eq!(format_docs_(comment), "```text\nthis is plain text\n```");
    }

    #[test]
    fn test_format_docs_handles_non_rust() {
        let comment = "```sh\nsupposedly shell code\n```";
        assert_eq!(format_docs_(comment), "```sh\nsupposedly shell code\n```");
    }

    #[test]
    fn test_format_docs_handles_rust_alias() {
        let comment = "```ignore\nlet z = 55;\n```";
        assert_eq!(format_docs_(comment), "```rust\nlet z = 55;\n```");
    }

    #[test]
    fn test_format_docs_handles_complex_code_block_attrs() {
        let comment = "```rust,no_run\nlet z = 55;\n```";
        assert_eq!(format_docs_(comment), "```rust\nlet z = 55;\n```");
    }

    #[test]
    fn test_format_docs_handles_error_codes() {
        let comment = "```compile_fail,E0641\nlet b = 0 as *const _;\n```";
        assert_eq!(format_docs_(comment), "```rust\nlet b = 0 as *const _;\n```");
    }

    // --- hidden-line stripping: only inside rust blocks ---

    #[test]
    fn test_format_docs_skips_comments_in_rust_block() {
        let comment =
            "```rust\n # skip1\n# skip2\n#stay1\nstay2\n#\n #\n # \n #\tskip3\n\t#\t\n```";
        assert_eq!(format_docs_(comment), "```rust\n#stay1\nstay2\n```");
    }

    #[test]
    fn test_format_docs_does_not_skip_lines_if_plain_text() {
        let comment =
            "```text\n # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t\n```";
        assert_eq!(
            format_docs_(comment),
            "```text\n # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t\n```",
        );
    }

    #[test]
    fn test_format_docs_keeps_comments_outside_of_rust_block() {
        let comment = " # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t";
        assert_eq!(format_docs_(comment), comment);
    }

    #[test]
    fn test_format_docs_preserves_newlines() {
        let comment = "this\nis\nmultiline";
        assert_eq!(format_docs_(comment), comment);
    }

    // --- unmarked fences between marked ones are treated independently ---

    #[test]
    fn test_code_blocks_in_comments_marked_as_rust() {
        let comment = r#"```rust
fn main(){}
```
Some comment.
```
let a = 1;
```"#;
        assert_eq!(
            format_docs_(comment),
            "```rust\nfn main(){}\n```\nSome comment.\n```rust\nlet a = 1;\n```"
        );
    }

    #[test]
    fn test_code_blocks_in_comments_marked_as_text() {
        let comment = r#"```text
filler
text
```
Some comment.
```
let a = 1;
```"#;
        assert_eq!(
            format_docs_(comment),
            "```text\nfiller\ntext\n```\nSome comment.\n```rust\nlet a = 1;\n```"
        );
    }

    // --- `##` escape handling: unescaped only in rust blocks ---

    #[test]
    fn test_format_docs_handles_escape_double_hashes() {
        let comment = r#"```rust
let s = "foo
## bar # baz";
```"#;
        assert_eq!(format_docs_(comment), "```rust\nlet s = \"foo\n# bar # baz\";\n```");
    }

    #[test]
    fn test_format_docs_handles_double_hashes_non_rust() {
        let comment = r#"```markdown
## A second-level heading
```"#;
        assert_eq!(format_docs_(comment), "```markdown\n## A second-level heading\n```");
    }
}

View file

@ -6,7 +6,7 @@
use std::mem; use std::mem;
use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{ use hir::{
AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility, AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility,
}; };
@ -221,7 +221,6 @@ impl Definition {
} }
// def is crate root // def is crate root
// FIXME: We don't do searches for crates currently, as a crate does not actually have a single name
if let &Definition::Module(module) = self { if let &Definition::Module(module) = self {
if module.is_crate_root() { if module.is_crate_root() {
return SearchScope::reverse_dependencies(db, module.krate()); return SearchScope::reverse_dependencies(db, module.krate());
@ -393,7 +392,10 @@ impl<'a> FindUsages<'a> {
let name = match self.def { let name = match self.def {
// special case crate modules as these do not have a proper name // special case crate modules as these do not have a proper name
Definition::Module(module) if module.is_crate_root() => { Definition::Module(module) if module.is_crate_root() => {
// FIXME: This assumes the crate name is always equal to its display name when it really isn't // FIXME: This assumes the crate name is always equal to its display name when it
// really isn't
// we should instead look at the dependency edge name and recursively search our way
// up the ancestors
module module
.krate() .krate()
.display_name(self.sema.db) .display_name(self.sema.db)
@ -468,6 +470,7 @@ impl<'a> FindUsages<'a> {
}; };
for (text, file_id, search_range) in scope_files(sema, &search_scope) { for (text, file_id, search_range) in scope_files(sema, &search_scope) {
self.sema.db.unwind_if_cancelled();
let tree = Lazy::new(move || sema.parse(file_id).syntax().clone()); let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
// Search for occurrences of the items name // Search for occurrences of the items name
@ -504,6 +507,7 @@ impl<'a> FindUsages<'a> {
let finder = &Finder::new("super"); let finder = &Finder::new("super");
for (text, file_id, search_range) in scope_files(sema, &scope) { for (text, file_id, search_range) in scope_files(sema, &scope) {
self.sema.db.unwind_if_cancelled();
let tree = Lazy::new(move || sema.parse(file_id).syntax().clone()); let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
for offset in match_indices(&text, finder, search_range) { for offset in match_indices(&text, finder, search_range) {

View file

@ -1,10 +1,10 @@
//! Tools to work with format string literals for the `format_args!` family of macros. //! Tools to work with format string literals for the `format_args!` family of macros.
use crate::syntax_helpers::node_ext::macro_call_for_string_token;
use syntax::{ use syntax::{
ast::{self, IsString}, ast::{self, IsString},
TextRange, TextSize, AstNode, AstToken, TextRange, TextSize,
}; };
// FIXME: This can probably be re-implemented via the HIR?
pub fn is_format_string(string: &ast::String) -> bool { pub fn is_format_string(string: &ast::String) -> bool {
// Check if `string` is a format string argument of a macro invocation. // Check if `string` is a format string argument of a macro invocation.
// `string` is a string literal, mapped down into the innermost macro expansion. // `string` is a string literal, mapped down into the innermost macro expansion.
@ -15,19 +15,9 @@ pub fn is_format_string(string: &ast::String) -> bool {
// This setup lets us correctly highlight the components of `concat!("{}", "bla")` format // This setup lets us correctly highlight the components of `concat!("{}", "bla")` format
// strings. It still fails for `concat!("{", "}")`, but that is rare. // strings. It still fails for `concat!("{", "}")`, but that is rare.
(|| { (|| {
let name = macro_call_for_string_token(string)?.path()?.segment()?.name_ref()?; let lit = string.syntax().parent().and_then(ast::Literal::cast)?;
let fa = lit.syntax().parent().and_then(ast::FormatArgsExpr::cast)?;
if !matches!( (fa.template()? == ast::Expr::Literal(lit)).then_some(|| ())
name.text().as_str(),
"format_args" | "format_args_nl" | "const_format_args" | "panic_2015" | "panic_2021"
) {
return None;
}
// NB: we match against `panic_2015`/`panic_2021` here because they have a special-cased arm for
// `"{}"`, which otherwise wouldn't get highlighted.
Some(())
})() })()
.is_some() .is_some()
} }

View file

@ -312,7 +312,6 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
ast::Expr::ArrayExpr(_) ast::Expr::ArrayExpr(_)
| ast::Expr::AwaitExpr(_) | ast::Expr::AwaitExpr(_)
| ast::Expr::BinExpr(_) | ast::Expr::BinExpr(_)
| ast::Expr::BoxExpr(_)
| ast::Expr::BreakExpr(_) | ast::Expr::BreakExpr(_)
| ast::Expr::CallExpr(_) | ast::Expr::CallExpr(_)
| ast::Expr::CastExpr(_) | ast::Expr::CastExpr(_)
@ -335,7 +334,10 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
| ast::Expr::LetExpr(_) | ast::Expr::LetExpr(_)
| ast::Expr::UnderscoreExpr(_) | ast::Expr::UnderscoreExpr(_)
| ast::Expr::YieldExpr(_) | ast::Expr::YieldExpr(_)
| ast::Expr::YeetExpr(_) => cb(expr), | ast::Expr::YeetExpr(_)
| ast::Expr::OffsetOfExpr(_)
| ast::Expr::FormatArgsExpr(_)
| ast::Expr::AsmExpr(_) => cb(expr),
} }
} }

View file

@ -157,6 +157,7 @@ struct S;
fn macro_diag_builtin() { fn macro_diag_builtin() {
check_diagnostics( check_diagnostics(
r#" r#"
//- minicore: fmt
#[rustc_builtin_macro] #[rustc_builtin_macro]
macro_rules! env {} macro_rules! env {}
@ -166,9 +167,6 @@ macro_rules! include {}
#[rustc_builtin_macro] #[rustc_builtin_macro]
macro_rules! compile_error {} macro_rules! compile_error {}
#[rustc_builtin_macro]
macro_rules! format_args { () => {} }
fn main() { fn main() {
// Test a handful of built-in (eager) macros: // Test a handful of built-in (eager) macros:
@ -189,7 +187,7 @@ fn main() {
// Lazy: // Lazy:
format_args!(); format_args!();
//^^^^^^^^^^^ error: no rule matches input tokens //^^^^^^^^^^^ error: Syntax Error in Expansion: expected expression
} }
"#, "#,
); );

View file

@ -1,10 +1,37 @@
use either::Either;
use hir::InFile;
use syntax::{ use syntax::{
ast::{self, HasArgList}, ast::{self, HasArgList},
AstNode, TextRange, AstNode, SyntaxNodePtr, TextRange,
}; };
use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: mismatched-tuple-struct-pat-arg-count
//
// This diagnostic is triggered if a function is invoked with an incorrect amount of arguments.
pub(crate) fn mismatched_tuple_struct_pat_arg_count(
ctx: &DiagnosticsContext<'_>,
d: &hir::MismatchedTupleStructPatArgCount,
) -> Diagnostic {
let s = if d.found == 1 { "" } else { "s" };
let s2 = if d.expected == 1 { "" } else { "s" };
let message = format!(
"this pattern has {} field{s}, but the corresponding tuple struct has {} field{s2}",
d.found, d.expected
);
Diagnostic::new(
DiagnosticCode::RustcHardError("E0023"),
message,
invalid_args_range(
ctx,
d.expr_or_pat.clone().map(|it| it.either(Into::into, Into::into)),
d.expected,
d.found,
),
)
}
// Diagnostic: mismatched-arg-count // Diagnostic: mismatched-arg-count
// //
// This diagnostic is triggered if a function is invoked with an incorrect amount of arguments. // This diagnostic is triggered if a function is invoked with an incorrect amount of arguments.
@ -14,31 +41,63 @@ pub(crate) fn mismatched_arg_count(
) -> Diagnostic { ) -> Diagnostic {
let s = if d.expected == 1 { "" } else { "s" }; let s = if d.expected == 1 { "" } else { "s" };
let message = format!("expected {} argument{s}, found {}", d.expected, d.found); let message = format!("expected {} argument{s}, found {}", d.expected, d.found);
Diagnostic::new(DiagnosticCode::RustcHardError("E0107"), message, invalid_args_range(ctx, d)) Diagnostic::new(
DiagnosticCode::RustcHardError("E0107"),
message,
invalid_args_range(ctx, d.call_expr.clone().map(Into::into), d.expected, d.found),
)
} }
fn invalid_args_range(ctx: &DiagnosticsContext<'_>, d: &hir::MismatchedArgCount) -> TextRange { fn invalid_args_range(
adjusted_display_range::<ast::Expr>(ctx, d.call_expr.clone().map(|it| it.into()), &|expr| { ctx: &DiagnosticsContext<'_>,
let arg_list = match expr { source: InFile<SyntaxNodePtr>,
ast::Expr::CallExpr(call) => call.arg_list()?, expected: usize,
ast::Expr::MethodCallExpr(call) => call.arg_list()?, found: usize,
) -> TextRange {
adjusted_display_range::<Either<ast::Expr, ast::TupleStructPat>>(ctx, source, &|expr| {
let (text_range, r_paren_token, expected_arg) = match expr {
Either::Left(ast::Expr::CallExpr(call)) => {
let arg_list = call.arg_list()?;
(
arg_list.syntax().text_range(),
arg_list.r_paren_token(),
arg_list.args().nth(expected).map(|it| it.syntax().text_range()),
)
}
Either::Left(ast::Expr::MethodCallExpr(call)) => {
let arg_list = call.arg_list()?;
(
arg_list.syntax().text_range(),
arg_list.r_paren_token(),
arg_list.args().nth(expected).map(|it| it.syntax().text_range()),
)
}
Either::Right(pat) => {
let r_paren = pat.r_paren_token()?;
let l_paren = pat.l_paren_token()?;
(
l_paren.text_range().cover(r_paren.text_range()),
Some(r_paren),
pat.fields().nth(expected).map(|it| it.syntax().text_range()),
)
}
_ => return None, _ => return None,
}; };
if d.found < d.expected { if found < expected {
if d.found == 0 { if found == 0 {
return Some(arg_list.syntax().text_range()); return Some(text_range);
} }
if let Some(r_paren) = arg_list.r_paren_token() { if let Some(r_paren) = r_paren_token {
return Some(r_paren.text_range()); return Some(r_paren.text_range());
} }
} }
if d.expected < d.found { if expected < found {
if d.expected == 0 { if expected == 0 {
return Some(arg_list.syntax().text_range()); return Some(text_range);
} }
let zip = arg_list.args().nth(d.expected).zip(arg_list.r_paren_token()); let zip = expected_arg.zip(r_paren_token);
if let Some((arg, r_paren)) = zip { if let Some((arg, r_paren)) = zip {
return Some(arg.syntax().text_range().cover(r_paren.text_range())); return Some(arg.cover(r_paren.text_range()));
} }
} }
@ -331,4 +390,21 @@ fn g() {
"#, "#,
) )
} }
#[test]
fn tuple_struct_pat() {
check_diagnostics(
r#"
struct S(u32, u32);
fn f(
S(a, b, c): S,
// ^^ error: this pattern has 3 fields, but the corresponding tuple struct has 2 fields
S(): S,
// ^^ error: this pattern has 0 fields, but the corresponding tuple struct has 2 fields
S(e, f, .., g, d): S
// ^^^^^^^^^ error: this pattern has 4 fields, but the corresponding tuple struct has 2 fields
) {}
"#,
)
}
} }

View file

@ -319,6 +319,7 @@ fn main() {
match Either::A { match Either::A {
Either::A => (), Either::A => (),
Either::B() => (), Either::B() => (),
// ^^ error: this pattern has 0 fields, but the corresponding tuple struct has 1 field
} }
} }
"#, "#,
@ -334,9 +335,11 @@ enum A { B(isize, isize), C }
fn main() { fn main() {
match A::B(1, 2) { match A::B(1, 2) {
A::B(_, _, _) => (), A::B(_, _, _) => (),
// ^^ error: this pattern has 3 fields, but the corresponding tuple struct has 2 fields
} }
match A::B(1, 2) { match A::B(1, 2) {
A::C(_) => (), A::C(_) => (),
// ^^^ error: this pattern has 1 field, but the corresponding tuple struct has 0 fields
} }
} }
"#, "#,
@ -846,6 +849,7 @@ fn main() {
struct Foo { } struct Foo { }
fn main(f: Foo) { fn main(f: Foo) {
match f { Foo { bar } => () } match f { Foo { bar } => () }
// ^^^ error: no such field
} }
"#, "#,
); );

View file

@ -76,7 +76,7 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Di
"variable does not need to be mutable", "variable does not need to be mutable",
ast, ast,
) )
.experimental() // Not supporting `#[allow(unused_mut)]` leads to false positive. .experimental() // Not supporting `#[allow(unused_mut)]` in proc macros leads to false positive.
.with_fixes(fixes) .with_fixes(fixes)
} }
@ -1170,6 +1170,29 @@ fn f() {
loop {} loop {}
for _ in 0..2 {} for _ in 0..2 {}
} }
"#,
);
}
#[test]
fn regression_15623() {
check_diagnostics(
r#"
//- minicore: fn
struct Foo;
impl Foo {
fn needs_mut(&mut self) {}
}
fn foo(mut foo: Foo) {
let mut call_me = || {
let 0 = 1 else { return };
foo.needs_mut();
};
call_me();
}
"#, "#,
); );
} }

Some files were not shown because too many files have changed in this diff Show more