Merge from rustc

This commit is contained in:
The Miri Conjob Bot 2024-02-19 05:12:20 +00:00
commit e0a3b6396e
194 changed files with 7121 additions and 1999 deletions

33
.github/rust.json vendored Normal file
View file

@ -0,0 +1,33 @@
{
"problemMatcher": [
{
"owner": "rustfmt",
"severity": "warning",
"pattern": [
{
"regexp": "^(Diff in (.+)) at line (\\d+):$",
"message": 1,
"file": 2,
"line": 3
}
]
},
{
"owner": "clippy",
"pattern": [
{
"regexp": "^(?:\\x1b\\[[\\d;]+m)*(warning|warn|error)(?:\\x1b\\[[\\d;]+m)*(\\[(.*)\\])?(?:\\x1b\\[[\\d;]+m)*:(?:\\x1b\\[[\\d;]+m)* ([^\\x1b]*)(?:\\x1b\\[[\\d;]+m)*$",
"severity": 1,
"message": 4,
"code": 3
},
{
"regexp": "^(?:\\x1b\\[[\\d;]+m)*\\s*(?:\\x1b\\[[\\d;]+m)*\\s*--> (?:\\x1b\\[[\\d;]+m)*(.*):(\\d*):(\\d*)(?:\\x1b\\[[\\d;]+m)*$",
"file": 1,
"line": 2,
"column": 3
}
]
}
]
}

View file

@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0

View file

@ -27,7 +27,7 @@ jobs:
typescript: ${{ steps.filter.outputs.typescript }} typescript: ${{ steps.filter.outputs.typescript }}
proc_macros: ${{ steps.filter.outputs.proc_macros }} proc_macros: ${{ steps.filter.outputs.proc_macros }}
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- uses: dorny/paths-filter@1441771bbfdd59dcd748680ee64ebd8faab1a242 - uses: dorny/paths-filter@1441771bbfdd59dcd748680ee64ebd8faab1a242
id: filter id: filter
with: with:
@ -56,7 +56,7 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
with: with:
ref: ${{ github.event.pull_request.head.sha }} ref: ${{ github.event.pull_request.head.sha }}
@ -65,6 +65,10 @@ jobs:
rustup update --no-self-update ${{ env.RUST_CHANNEL }} rustup update --no-self-update ${{ env.RUST_CHANNEL }}
rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src
rustup default ${{ env.RUST_CHANNEL }} rustup default ${{ env.RUST_CHANNEL }}
# https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json
- name: Install Rust Problem Matcher
if: matrix.os == 'ubuntu-latest'
run: echo "::add-matcher::.github/rust.json"
- name: Cache Dependencies - name: Cache Dependencies
uses: Swatinem/rust-cache@988c164c3d0e93c4dbab36aaf5bbeb77425b2894 uses: Swatinem/rust-cache@988c164c3d0e93c4dbab36aaf5bbeb77425b2894
@ -107,6 +111,10 @@ jobs:
if: matrix.os == 'windows-latest' if: matrix.os == 'windows-latest'
run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr
- name: rustfmt
if: matrix.os == 'ubuntu-latest'
run: cargo fmt -- --check
# Weird targets to catch non-portable code # Weird targets to catch non-portable code
rust-cross: rust-cross:
if: github.repository == 'rust-lang/rust-analyzer' if: github.repository == 'rust-lang/rust-analyzer'
@ -121,7 +129,7 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Install Rust toolchain - name: Install Rust toolchain
run: | run: |
@ -153,13 +161,13 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
if: needs.changes.outputs.typescript == 'true' if: needs.changes.outputs.typescript == 'true'
- name: Install Nodejs - name: Install Nodejs
uses: actions/setup-node@v3 uses: actions/setup-node@v4
with: with:
node-version: 16 node-version: 18
if: needs.changes.outputs.typescript == 'true' if: needs.changes.outputs.typescript == 'true'
- name: Install xvfb - name: Install xvfb

View file

@ -27,7 +27,7 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
with: with:
ref: ${{ github.event.pull_request.head.sha }} ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 1 fetch-depth: 1

View file

@ -21,7 +21,7 @@ jobs:
rustup component add rustfmt rust-src rustup component add rustfmt rust-src
rustup default stable rustup default stable
- name: Cache cargo - name: Cache cargo
uses: actions/cache@v3 uses: actions/cache@v4
with: with:
path: | path: |
~/.cargo/bin/ ~/.cargo/bin/
@ -36,10 +36,10 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Restore cargo cache - name: Restore cargo cache
uses: actions/cache@v3 uses: actions/cache@v4
with: with:
path: | path: |
~/.cargo/bin/ ~/.cargo/bin/
@ -52,7 +52,7 @@ jobs:
run: cargo xtask metrics build run: cargo xtask metrics build
- name: Cache target - name: Cache target
uses: actions/cache@v3 uses: actions/cache@v4
with: with:
path: target/ path: target/
key: ${{ runner.os }}-target-${{ github.sha }} key: ${{ runner.os }}-target-${{ github.sha }}
@ -73,10 +73,10 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Restore cargo cache - name: Restore cargo cache
uses: actions/cache@v3 uses: actions/cache@v4
with: with:
path: | path: |
~/.cargo/bin/ ~/.cargo/bin/
@ -86,7 +86,7 @@ jobs:
key: ${{ runner.os }}-cargo-${{ github.sha }} key: ${{ runner.os }}-cargo-${{ github.sha }}
- name: Restore target cache - name: Restore target cache
uses: actions/cache@v3 uses: actions/cache@v4
with: with:
path: target/ path: target/
key: ${{ runner.os }}-target-${{ github.sha }} key: ${{ runner.os }}-target-${{ github.sha }}
@ -106,7 +106,7 @@ jobs:
needs: [build_metrics, other_metrics] needs: [build_metrics, other_metrics]
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Download build metrics - name: Download build metrics
uses: actions/download-artifact@v3 uses: actions/download-artifact@v3

View file

@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0

View file

@ -59,7 +59,7 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
with: with:
fetch-depth: ${{ env.FETCH_DEPTH }} fetch-depth: ${{ env.FETCH_DEPTH }}
@ -78,9 +78,9 @@ jobs:
rustup component add rust-src rustup component add rust-src
- name: Install Node.js - name: Install Node.js
uses: actions/setup-node@v3 uses: actions/setup-node@v4
with: with:
node-version: 16 node-version: 18
- name: Update apt repositories - name: Update apt repositories
if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf'
@ -154,7 +154,7 @@ jobs:
run: apk add --no-cache git clang lld musl-dev nodejs npm run: apk add --no-cache git clang lld musl-dev nodejs npm
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
with: with:
fetch-depth: ${{ env.FETCH_DEPTH }} fetch-depth: ${{ env.FETCH_DEPTH }}
@ -188,9 +188,9 @@ jobs:
needs: ["dist", "dist-x86_64-unknown-linux-musl"] needs: ["dist", "dist-x86_64-unknown-linux-musl"]
steps: steps:
- name: Install Nodejs - name: Install Nodejs
uses: actions/setup-node@v3 uses: actions/setup-node@v4
with: with:
node-version: 16 node-version: 18
- run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV - run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV
if: github.ref == 'refs/heads/release' if: github.ref == 'refs/heads/release'
@ -199,7 +199,7 @@ jobs:
- run: 'echo "TAG: $TAG"' - run: 'echo "TAG: $TAG"'
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
with: with:
fetch-depth: ${{ env.FETCH_DEPTH }} fetch-depth: ${{ env.FETCH_DEPTH }}

View file

@ -17,7 +17,7 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Install Rust toolchain - name: Install Rust toolchain
run: rustup update --no-self-update stable run: rustup update --no-self-update stable

17
Cargo.lock generated
View file

@ -1329,6 +1329,7 @@ dependencies = [
"paths", "paths",
"proc-macro-api", "proc-macro-api",
"proc-macro-test", "proc-macro-test",
"ra-ap-rustc_lexer",
"span", "span",
"stdx", "stdx",
"tt", "tt",
@ -1470,12 +1471,12 @@ dependencies = [
[[package]] [[package]]
name = "ra-ap-rustc_index" name = "ra-ap-rustc_index"
version = "0.36.0" version = "0.37.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8a41dee58608b1fc93779ea365edaa70ac9927e3335ae914b675be0fa063cd7" checksum = "df5a0ba0d08af366cf235dbe8eb7226cced7a4fe502c98aa434ccf416defd746"
dependencies = [ dependencies = [
"arrayvec", "arrayvec",
"ra-ap-rustc_index_macros 0.36.0", "ra-ap-rustc_index_macros 0.37.0",
"smallvec", "smallvec",
] ]
@ -1493,9 +1494,9 @@ dependencies = [
[[package]] [[package]]
name = "ra-ap-rustc_index_macros" name = "ra-ap-rustc_index_macros"
version = "0.36.0" version = "0.37.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbfe98def54c4337a2f7d8233850bd5d5349972b185fe8a0db2b979164b30ed8" checksum = "1971ebf9a701e0e68387c264a32517dcb4861ad3a4862f2e2803c1121ade20d5"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -1525,11 +1526,11 @@ dependencies = [
[[package]] [[package]]
name = "ra-ap-rustc_pattern_analysis" name = "ra-ap-rustc_pattern_analysis"
version = "0.36.0" version = "0.37.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5529bffec7530b4a3425640bfdfd9b95d87c4c620f740266c0de6572561aab4" checksum = "2c3c0e7ca9c5bdc66e3b590688e237a22ac47a48e4eac7f46b05b2abbfaf0abd"
dependencies = [ dependencies = [
"ra-ap-rustc_index 0.36.0", "ra-ap-rustc_index 0.37.0",
"rustc-hash", "rustc-hash",
"rustc_apfloat", "rustc_apfloat",
"smallvec", "smallvec",

View file

@ -84,7 +84,7 @@ ra-ap-rustc_lexer = { version = "0.35.0", default-features = false }
ra-ap-rustc_parse_format = { version = "0.35.0", default-features = false } ra-ap-rustc_parse_format = { version = "0.35.0", default-features = false }
ra-ap-rustc_index = { version = "0.35.0", default-features = false } ra-ap-rustc_index = { version = "0.35.0", default-features = false }
ra-ap-rustc_abi = { version = "0.35.0", default-features = false } ra-ap-rustc_abi = { version = "0.35.0", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.36.0", default-features = false } ra-ap-rustc_pattern_analysis = { version = "0.37.0", default-features = false }
# local crates that aren't published to crates.io. These should not have versions. # local crates that aren't published to crates.io. These should not have versions.
sourcegen = { path = "./crates/sourcegen" } sourcegen = { path = "./crates/sourcegen" }

View file

@ -11,7 +11,6 @@ use std::{fmt, mem, ops, str::FromStr};
use cfg::CfgOptions; use cfg::CfgOptions;
use la_arena::{Arena, Idx, RawIdx}; use la_arena::{Arena, Idx, RawIdx};
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
use syntax::SmolStr; use syntax::SmolStr;
use triomphe::Arc; use triomphe::Arc;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
@ -243,6 +242,7 @@ impl CrateDisplayName {
CrateDisplayName { crate_name, canonical_name } CrateDisplayName { crate_name, canonical_name }
} }
} }
pub type TargetLayoutLoadResult = Result<Arc<str>, Arc<str>>; pub type TargetLayoutLoadResult = Result<Arc<str>, Arc<str>>;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
@ -291,71 +291,6 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>, pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin, pub origin: CrateOrigin,
pub is_proc_macro: bool, pub is_proc_macro: bool,
// FIXME: These things should not be per crate! These are more per workspace crate graph level
// things. This info does need to be somewhat present though as to prevent deduplication from
// happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult,
pub toolchain: Option<Version>,
}
impl CrateData {
/// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value.
pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool {
// This method has some obscure bits. These are mostly there to be compliant with
// some patches. References to the patches are given.
if self.root_file_id != other.root_file_id {
return false;
}
if self.display_name != other.display_name {
return false;
}
if self.is_proc_macro != other.is_proc_macro {
return false;
}
if self.edition != other.edition {
return false;
}
if self.version != other.version {
return false;
}
let mut opts = self.cfg_options.difference(&other.cfg_options);
if let Some(it) = opts.next() {
// Don't care if rust_analyzer CfgAtom is the only cfg in the difference set of self's and other's cfgs.
// https://github.com/rust-lang/rust-analyzer/blob/0840038f02daec6ba3238f05d8caa037d28701a0/crates/project-model/src/workspace.rs#L894
if it.to_string() != "rust_analyzer" {
return false;
}
if opts.next().is_some() {
return false;
}
}
if self.env != other.env {
return false;
}
let slf_deps = self.dependencies.iter();
let other_deps = other.dependencies.iter();
if ignore_dev_deps {
return slf_deps
.clone()
.filter(|it| it.kind != DependencyKind::Dev)
.eq(other_deps.clone().filter(|it| it.kind != DependencyKind::Dev));
}
slf_deps.eq(other_deps)
}
pub fn channel(&self) -> Option<ReleaseChannel> {
self.toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -398,32 +333,22 @@ pub enum DependencyKind {
pub struct Dependency { pub struct Dependency {
pub crate_id: CrateId, pub crate_id: CrateId,
pub name: CrateName, pub name: CrateName,
kind: DependencyKind,
prelude: bool, prelude: bool,
} }
impl Dependency { impl Dependency {
pub fn new(name: CrateName, crate_id: CrateId, kind: DependencyKind) -> Self { pub fn new(name: CrateName, crate_id: CrateId) -> Self {
Self { name, crate_id, prelude: true, kind } Self { name, crate_id, prelude: true }
} }
pub fn with_prelude( pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self {
name: CrateName, Self { name, crate_id, prelude }
crate_id: CrateId,
prelude: bool,
kind: DependencyKind,
) -> Self {
Self { name, crate_id, prelude, kind }
} }
/// Whether this dependency is to be added to the depending crate's extern prelude. /// Whether this dependency is to be added to the depending crate's extern prelude.
pub fn is_prelude(&self) -> bool { pub fn is_prelude(&self) -> bool {
self.prelude self.prelude
} }
pub fn kind(&self) -> DependencyKind {
self.kind
}
} }
impl CrateGraph { impl CrateGraph {
@ -438,8 +363,6 @@ impl CrateGraph {
env: Env, env: Env,
is_proc_macro: bool, is_proc_macro: bool,
origin: CrateOrigin, origin: CrateOrigin,
target_layout: Result<Arc<str>, Arc<str>>,
toolchain: Option<Version>,
) -> CrateId { ) -> CrateId {
let data = CrateData { let data = CrateData {
root_file_id, root_file_id,
@ -451,9 +374,7 @@ impl CrateGraph {
env, env,
dependencies: Vec::new(), dependencies: Vec::new(),
origin, origin,
target_layout,
is_proc_macro, is_proc_macro,
toolchain,
}; };
self.arena.alloc(data) self.arena.alloc(data)
} }
@ -523,6 +444,10 @@ impl CrateGraph {
self.arena.is_empty() self.arena.is_empty()
} }
pub fn len(&self) -> usize {
self.arena.len()
}
pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ { pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ {
self.arena.iter().map(|(idx, _)| idx) self.arena.iter().map(|(idx, _)| idx)
} }
@ -623,13 +548,17 @@ impl CrateGraph {
/// ///
/// This will deduplicate the crates of the graph where possible. /// This will deduplicate the crates of the graph where possible.
/// Note that for deduplication to fully work, `self`'s crate dependencies must be sorted by crate id. /// Note that for deduplication to fully work, `self`'s crate dependencies must be sorted by crate id.
/// If the crate dependencies were sorted, the resulting graph from this `extend` call will also have the crate dependencies sorted. /// If the crate dependencies were sorted, the resulting graph from this `extend` call will also
/// have the crate dependencies sorted.
///
/// Returns a mapping from `other`'s crate ids to the new crate ids in `self`.
pub fn extend( pub fn extend(
&mut self, &mut self,
mut other: CrateGraph, mut other: CrateGraph,
proc_macros: &mut ProcMacroPaths, proc_macros: &mut ProcMacroPaths,
on_finished: impl FnOnce(&FxHashMap<CrateId, CrateId>), merge: impl Fn((CrateId, &mut CrateData), (CrateId, &CrateData)) -> bool,
) { ) -> FxHashMap<CrateId, CrateId> {
let m = self.len();
let topo = other.crates_in_topological_order(); let topo = other.crates_in_topological_order();
let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default(); let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default();
for topo in topo { for topo in topo {
@ -637,51 +566,21 @@ impl CrateGraph {
crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]); crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
crate_data.dependencies.sort_by_key(|dep| dep.crate_id); crate_data.dependencies.sort_by_key(|dep| dep.crate_id);
let res = self.arena.iter().find_map(|(id, data)| { let res = self
match (&data.origin, &crate_data.origin) { .arena
(a, b) if a == b => { .iter_mut()
if data.eq_ignoring_origin_and_deps(crate_data, false) { .take(m)
return Some((id, false)); .find_map(|(id, data)| merge((id, data), (topo, &crate_data)).then_some(id));
}
}
(a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. })
| (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) => {
// If the origins differ, check if the two crates are equal without
// considering the dev dependencies, if they are, they most likely are in
// different loaded workspaces which may cause issues. We keep the local
// version and discard the library one as the local version may have
// dev-dependencies that we want to keep resolving. See #15656 for more
// information.
if data.eq_ignoring_origin_and_deps(crate_data, true) {
return Some((id, !a.is_local()));
}
}
(_, _) => return None,
}
None let new_id =
}); if let Some(res) = res { res } else { self.arena.alloc(crate_data.clone()) };
id_map.insert(topo, new_id);
if let Some((res, should_update_lib_to_local)) = res {
id_map.insert(topo, res);
if should_update_lib_to_local {
assert!(self.arena[res].origin.is_lib());
assert!(crate_data.origin.is_local());
self.arena[res].origin = crate_data.origin.clone();
// Move local's dev dependencies into the newly-local-formerly-lib crate.
self.arena[res].dependencies = crate_data.dependencies.clone();
}
} else {
let id = self.arena.alloc(crate_data.clone());
id_map.insert(topo, id);
}
} }
*proc_macros = *proc_macros =
mem::take(proc_macros).into_iter().map(|(id, macros)| (id_map[&id], macros)).collect(); mem::take(proc_macros).into_iter().map(|(id, macros)| (id_map[&id], macros)).collect();
on_finished(&id_map); id_map
} }
fn find_path( fn find_path(
@ -719,11 +618,9 @@ impl CrateGraph {
match (cfg_if, std) { match (cfg_if, std) {
(Some(cfg_if), Some(std)) => { (Some(cfg_if), Some(std)) => {
self.arena[cfg_if].dependencies.clear(); self.arena[cfg_if].dependencies.clear();
self.arena[std].dependencies.push(Dependency::new( self.arena[std]
CrateName::new("cfg_if").unwrap(), .dependencies
cfg_if, .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if));
DependencyKind::Normal,
));
true true
} }
_ => false, _ => false,
@ -871,7 +768,7 @@ impl fmt::Display for CyclicDependenciesError {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::{CrateOrigin, DependencyKind}; use crate::CrateOrigin;
use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId};
@ -888,8 +785,6 @@ mod tests {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
); );
let crate2 = graph.add_crate_root( let crate2 = graph.add_crate_root(
FileId::from_raw(2u32), FileId::from_raw(2u32),
@ -901,8 +796,6 @@ mod tests {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
); );
let crate3 = graph.add_crate_root( let crate3 = graph.add_crate_root(
FileId::from_raw(3u32), FileId::from_raw(3u32),
@ -914,26 +807,15 @@ mod tests {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
); );
assert!(graph assert!(graph
.add_dep( .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
crate1,
Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
)
.is_ok()); .is_ok());
assert!(graph assert!(graph
.add_dep( .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,))
crate2,
Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal)
)
.is_ok()); .is_ok());
assert!(graph assert!(graph
.add_dep( .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1,))
crate3,
Dependency::new(CrateName::new("crate1").unwrap(), crate1, DependencyKind::Normal)
)
.is_err()); .is_err());
} }
@ -950,8 +832,6 @@ mod tests {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
); );
let crate2 = graph.add_crate_root( let crate2 = graph.add_crate_root(
FileId::from_raw(2u32), FileId::from_raw(2u32),
@ -963,20 +843,12 @@ mod tests {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
); );
assert!(graph assert!(graph
.add_dep( .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
crate1,
Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
)
.is_ok()); .is_ok());
assert!(graph assert!(graph
.add_dep( .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
crate2,
Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
)
.is_err()); .is_err());
} }
@ -993,8 +865,6 @@ mod tests {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
); );
let crate2 = graph.add_crate_root( let crate2 = graph.add_crate_root(
FileId::from_raw(2u32), FileId::from_raw(2u32),
@ -1006,8 +876,6 @@ mod tests {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
); );
let crate3 = graph.add_crate_root( let crate3 = graph.add_crate_root(
FileId::from_raw(3u32), FileId::from_raw(3u32),
@ -1019,20 +887,12 @@ mod tests {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
); );
assert!(graph assert!(graph
.add_dep( .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
crate1,
Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
)
.is_ok()); .is_ok());
assert!(graph assert!(graph
.add_dep( .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,))
crate2,
Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal)
)
.is_ok()); .is_ok());
} }
@ -1049,8 +909,6 @@ mod tests {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
); );
let crate2 = graph.add_crate_root( let crate2 = graph.add_crate_root(
FileId::from_raw(2u32), FileId::from_raw(2u32),
@ -1062,26 +920,16 @@ mod tests {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
); );
assert!(graph assert!(graph
.add_dep( .add_dep(
crate1, crate1,
Dependency::new( Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2,)
CrateName::normalize_dashes("crate-name-with-dashes"),
crate2,
DependencyKind::Normal
)
) )
.is_ok()); .is_ok());
assert_eq!( assert_eq!(
graph[crate1].dependencies, graph[crate1].dependencies,
vec![Dependency::new( vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2,)]
CrateName::new("crate_name_with_dashes").unwrap(),
crate2,
DependencyKind::Normal
)]
); );
} }
} }

View file

@ -62,6 +62,20 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
/// The crate graph. /// The crate graph.
#[salsa::input] #[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>; fn crate_graph(&self) -> Arc<CrateGraph>;
// FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
#[salsa::input]
fn data_layout(&self, krate: CrateId) -> TargetLayoutLoadResult;
#[salsa::input]
fn toolchain(&self, krate: CrateId) -> Option<Version>;
#[salsa::transparent]
fn toolchain_channel(&self, krate: CrateId) -> Option<ReleaseChannel>;
}
fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseChannel> {
db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
} }
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> { fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {

View file

@ -14,7 +14,7 @@ use std::{
use command_group::{CommandGroup, GroupChild}; use command_group::{CommandGroup, GroupChild};
use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
use paths::AbsPathBuf; use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use serde::Deserialize; use serde::Deserialize;
use stdx::process::streaming_output; use stdx::process::streaming_output;
@ -23,6 +23,7 @@ pub use cargo_metadata::diagnostic::{
Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan, Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
DiagnosticSpanMacroExpansion, DiagnosticSpanMacroExpansion,
}; };
use toolchain::Tool;
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum InvocationStrategy { pub enum InvocationStrategy {
@ -89,9 +90,10 @@ impl FlycheckHandle {
id: usize, id: usize,
sender: Box<dyn Fn(Message) + Send>, sender: Box<dyn Fn(Message) + Send>,
config: FlycheckConfig, config: FlycheckConfig,
sysroot_root: Option<AbsPathBuf>,
workspace_root: AbsPathBuf, workspace_root: AbsPathBuf,
) -> FlycheckHandle { ) -> FlycheckHandle {
let actor = FlycheckActor::new(id, sender, config, workspace_root); let actor = FlycheckActor::new(id, sender, config, sysroot_root, workspace_root);
let (sender, receiver) = unbounded::<StateChange>(); let (sender, receiver) = unbounded::<StateChange>();
let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker) let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
.name("Flycheck".to_owned()) .name("Flycheck".to_owned())
@ -101,13 +103,15 @@ impl FlycheckHandle {
} }
/// Schedule a re-start of the cargo check worker to do a workspace wide check. /// Schedule a re-start of the cargo check worker to do a workspace wide check.
pub fn restart_workspace(&self) { pub fn restart_workspace(&self, saved_file: Option<AbsPathBuf>) {
self.sender.send(StateChange::Restart(None)).unwrap(); self.sender.send(StateChange::Restart { package: None, saved_file }).unwrap();
} }
/// Schedule a re-start of the cargo check worker to do a package wide check. /// Schedule a re-start of the cargo check worker to do a package wide check.
pub fn restart_for_package(&self, package: String) { pub fn restart_for_package(&self, package: String) {
self.sender.send(StateChange::Restart(Some(package))).unwrap(); self.sender
.send(StateChange::Restart { package: Some(package), saved_file: None })
.unwrap();
} }
/// Stop this cargo check worker. /// Stop this cargo check worker.
@ -158,7 +162,7 @@ pub enum Progress {
} }
enum StateChange { enum StateChange {
Restart(Option<String>), Restart { package: Option<String>, saved_file: Option<AbsPathBuf> },
Cancel, Cancel,
} }
@ -171,6 +175,7 @@ struct FlycheckActor {
/// Either the workspace root of the workspace we are flychecking, /// Either the workspace root of the workspace we are flychecking,
/// or the project root of the project. /// or the project root of the project.
root: AbsPathBuf, root: AbsPathBuf,
sysroot_root: Option<AbsPathBuf>,
/// CargoHandle exists to wrap around the communication needed to be able to /// CargoHandle exists to wrap around the communication needed to be able to
/// run `cargo check` without blocking. Currently the Rust standard library /// run `cargo check` without blocking. Currently the Rust standard library
/// doesn't provide a way to read sub-process output without blocking, so we /// doesn't provide a way to read sub-process output without blocking, so we
@ -184,15 +189,25 @@ enum Event {
CheckEvent(Option<CargoMessage>), CheckEvent(Option<CargoMessage>),
} }
const SAVED_FILE_PLACEHOLDER: &str = "$saved_file";
impl FlycheckActor { impl FlycheckActor {
fn new( fn new(
id: usize, id: usize,
sender: Box<dyn Fn(Message) + Send>, sender: Box<dyn Fn(Message) + Send>,
config: FlycheckConfig, config: FlycheckConfig,
sysroot_root: Option<AbsPathBuf>,
workspace_root: AbsPathBuf, workspace_root: AbsPathBuf,
) -> FlycheckActor { ) -> FlycheckActor {
tracing::info!(%id, ?workspace_root, "Spawning flycheck"); tracing::info!(%id, ?workspace_root, "Spawning flycheck");
FlycheckActor { id, sender, config, root: workspace_root, command_handle: None } FlycheckActor {
id,
sender,
config,
sysroot_root,
root: workspace_root,
command_handle: None,
}
} }
fn report_progress(&self, progress: Progress) { fn report_progress(&self, progress: Progress) {
@ -218,7 +233,7 @@ impl FlycheckActor {
tracing::debug!(flycheck_id = self.id, "flycheck cancelled"); tracing::debug!(flycheck_id = self.id, "flycheck cancelled");
self.cancel_check_process(); self.cancel_check_process();
} }
Event::RequestStateChange(StateChange::Restart(package)) => { Event::RequestStateChange(StateChange::Restart { package, saved_file }) => {
// Cancel the previously spawned process // Cancel the previously spawned process
self.cancel_check_process(); self.cancel_check_process();
while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) { while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) {
@ -228,7 +243,11 @@ impl FlycheckActor {
} }
} }
let command = self.check_command(package.as_deref()); let command =
match self.check_command(package.as_deref(), saved_file.as_deref()) {
Some(c) => c,
None => continue,
};
let formatted_command = format!("{:?}", command); let formatted_command = format!("{:?}", command);
tracing::debug!(?command, "will restart flycheck"); tracing::debug!(?command, "will restart flycheck");
@ -302,7 +321,14 @@ impl FlycheckActor {
} }
} }
fn check_command(&self, package: Option<&str>) -> Command { /// Construct a `Command` object for checking the user's code. If the user
/// has specified a custom command with placeholders that we cannot fill,
/// return None.
fn check_command(
&self,
package: Option<&str>,
saved_file: Option<&AbsPath>,
) -> Option<Command> {
let (mut cmd, args) = match &self.config { let (mut cmd, args) = match &self.config {
FlycheckConfig::CargoCommand { FlycheckConfig::CargoCommand {
command, command,
@ -316,7 +342,10 @@ impl FlycheckActor {
ansi_color_output, ansi_color_output,
target_dir, target_dir,
} => { } => {
let mut cmd = Command::new(toolchain::cargo()); let mut cmd = Command::new(Tool::Cargo.path());
if let Some(sysroot_root) = &self.sysroot_root {
cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root));
}
cmd.arg(command); cmd.arg(command);
cmd.current_dir(&self.root); cmd.current_dir(&self.root);
@ -355,7 +384,7 @@ impl FlycheckActor {
cmd.arg("--target-dir").arg(target_dir); cmd.arg("--target-dir").arg(target_dir);
} }
cmd.envs(extra_env); cmd.envs(extra_env);
(cmd, extra_args) (cmd, extra_args.clone())
} }
FlycheckConfig::CustomCommand { FlycheckConfig::CustomCommand {
command, command,
@ -384,12 +413,34 @@ impl FlycheckActor {
} }
} }
(cmd, args) if args.contains(&SAVED_FILE_PLACEHOLDER.to_owned()) {
// If the custom command has a $saved_file placeholder, and
// we're saving a file, replace the placeholder in the arguments.
if let Some(saved_file) = saved_file {
let args = args
.iter()
.map(|arg| {
if arg == SAVED_FILE_PLACEHOLDER {
saved_file.to_string()
} else {
arg.clone()
}
})
.collect();
(cmd, args)
} else {
// The custom command has a $saved_file placeholder,
// but we had an IDE event that wasn't a file save. Do nothing.
return None;
}
} else {
(cmd, args.clone())
}
} }
}; };
cmd.args(args); cmd.args(args);
cmd Some(cmd)
} }
fn send(&self, check_task: Message) { fn send(&self, check_task: Message) {

View file

@ -377,27 +377,39 @@ impl AttrsWithOwner {
AttrDefId::GenericParamId(it) => match it { AttrDefId::GenericParamId(it) => match it {
GenericParamId::ConstParamId(it) => { GenericParamId::ConstParamId(it) => {
let src = it.parent().child_source(db); let src = it.parent().child_source(db);
RawAttrs::from_attrs_owner( // FIXME: We should be never getting `None` here.
db.upcast(), match src.value.get(it.local_id()) {
src.with_value(&src.value[it.local_id()]), Some(val) => RawAttrs::from_attrs_owner(
db.span_map(src.file_id).as_ref(), db.upcast(),
) src.with_value(val),
db.span_map(src.file_id).as_ref(),
),
None => RawAttrs::EMPTY,
}
} }
GenericParamId::TypeParamId(it) => { GenericParamId::TypeParamId(it) => {
let src = it.parent().child_source(db); let src = it.parent().child_source(db);
RawAttrs::from_attrs_owner( // FIXME: We should be never getting `None` here.
db.upcast(), match src.value.get(it.local_id()) {
src.with_value(&src.value[it.local_id()]), Some(val) => RawAttrs::from_attrs_owner(
db.span_map(src.file_id).as_ref(), db.upcast(),
) src.with_value(val),
db.span_map(src.file_id).as_ref(),
),
None => RawAttrs::EMPTY,
}
} }
GenericParamId::LifetimeParamId(it) => { GenericParamId::LifetimeParamId(it) => {
let src = it.parent.child_source(db); let src = it.parent.child_source(db);
RawAttrs::from_attrs_owner( // FIXME: We should be never getting `None` here.
db.upcast(), match src.value.get(it.local_id) {
src.with_value(&src.value[it.local_id]), Some(val) => RawAttrs::from_attrs_owner(
db.span_map(src.file_id).as_ref(), db.upcast(),
) src.with_value(val),
db.span_map(src.file_id).as_ref(),
),
None => RawAttrs::EMPTY,
}
} }
}, },
AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),

View file

@ -416,6 +416,11 @@ impl ExprCollector<'_> {
let expr = e.expr().map(|e| self.collect_expr(e)); let expr = e.expr().map(|e| self.collect_expr(e));
self.alloc_expr(Expr::Return { expr }, syntax_ptr) self.alloc_expr(Expr::Return { expr }, syntax_ptr)
} }
ast::Expr::BecomeExpr(e) => {
let expr =
e.expr().map(|e| self.collect_expr(e)).unwrap_or_else(|| self.missing_expr());
self.alloc_expr(Expr::Become { expr }, syntax_ptr)
}
ast::Expr::YieldExpr(e) => { ast::Expr::YieldExpr(e) => {
self.is_lowering_coroutine = true; self.is_lowering_coroutine = true;
let expr = e.expr().map(|e| self.collect_expr(e)); let expr = e.expr().map(|e| self.collect_expr(e));
@ -1000,10 +1005,6 @@ impl ExprCollector<'_> {
krate: *krate, krate: *krate,
}); });
} }
Some(ExpandError::RecursionOverflowPoisoned) => {
// Recursion limit has been reached in the macro expansion tree, but not in
// this very macro call. Don't add diagnostics to avoid duplication.
}
Some(err) => { Some(err) => {
self.source_map.diagnostics.push(BodyDiagnostic::MacroError { self.source_map.diagnostics.push(BodyDiagnostic::MacroError {
node: InFile::new(outer_file, syntax_ptr), node: InFile::new(outer_file, syntax_ptr),
@ -1112,7 +1113,7 @@ impl ExprCollector<'_> {
statements.push(Statement::Expr { expr, has_semi }); statements.push(Statement::Expr { expr, has_semi });
} }
} }
ast::Stmt::Item(_item) => (), ast::Stmt::Item(_item) => statements.push(Statement::Item),
} }
} }

View file

@ -6,7 +6,7 @@ use itertools::Itertools;
use crate::{ use crate::{
hir::{ hir::{
Array, BindingAnnotation, BindingId, CaptureBy, ClosureKind, Literal, LiteralOrConst, Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, LiteralOrConst,
Movability, Statement, Movability, Statement,
}, },
pretty::{print_generic_args, print_path, print_type_ref}, pretty::{print_generic_args, print_path, print_type_ref},
@ -261,6 +261,11 @@ impl Printer<'_> {
self.print_expr(*expr); self.print_expr(*expr);
} }
} }
Expr::Become { expr } => {
w!(self, "become");
self.whitespace();
self.print_expr(*expr);
}
Expr::Yield { expr } => { Expr::Yield { expr } => {
w!(self, "yield"); w!(self, "yield");
if let Some(expr) = expr { if let Some(expr) = expr {
@ -623,6 +628,7 @@ impl Printer<'_> {
} }
wln!(self); wln!(self);
} }
Statement::Item => (),
} }
} }

View file

@ -197,6 +197,7 @@ fn compute_block_scopes(
Statement::Expr { expr, .. } => { Statement::Expr { expr, .. } => {
compute_expr_scopes(*expr, body, scopes, scope); compute_expr_scopes(*expr, body, scopes, scope);
} }
Statement::Item => (),
} }
} }
if let Some(expr) = tail { if let Some(expr) = tail {

View file

@ -634,7 +634,6 @@ impl<'a> AssocItemCollector<'a> {
attr, attr,
) { ) {
Ok(ResolvedAttr::Macro(call_id)) => { Ok(ResolvedAttr::Macro(call_id)) => {
self.attr_calls.push((ast_id, call_id));
// If proc attribute macro expansion is disabled, skip expanding it here // If proc attribute macro expansion is disabled, skip expanding it here
if !self.db.expand_proc_attr_macros() { if !self.db.expand_proc_attr_macros() {
continue 'attrs; continue 'attrs;
@ -647,10 +646,21 @@ impl<'a> AssocItemCollector<'a> {
// disabled. This is analogous to the handling in // disabled. This is analogous to the handling in
// `DefCollector::collect_macros`. // `DefCollector::collect_macros`.
if exp.is_dummy() { if exp.is_dummy() {
self.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
self.module_id.local_id,
loc.kind,
loc.def.krate,
));
continue 'attrs;
}
if exp.is_disabled() {
continue 'attrs; continue 'attrs;
} }
} }
self.attr_calls.push((ast_id, call_id));
let res = let res =
self.expander.enter_expand_id::<ast::MacroItems>(self.db, call_id); self.expander.enter_expand_id::<ast::MacroItems>(self.db, call_id);
self.collect_macro_items(res, &|| loc.kind.clone()); self.collect_macro_items(res, &|| loc.kind.clone());

View file

@ -140,13 +140,11 @@ impl Expander {
// The overflow error should have been reported when it occurred (see the next branch), // The overflow error should have been reported when it occurred (see the next branch),
// so don't return overflow error here to avoid diagnostics duplication. // so don't return overflow error here to avoid diagnostics duplication.
cov_mark::hit!(overflow_but_not_me); cov_mark::hit!(overflow_but_not_me);
return ExpandResult::only_err(ExpandError::RecursionOverflowPoisoned); return ExpandResult::ok(None);
} else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() { } else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
self.recursion_depth = u32::MAX; self.recursion_depth = u32::MAX;
cov_mark::hit!(your_stack_belongs_to_me); cov_mark::hit!(your_stack_belongs_to_me);
return ExpandResult::only_err(ExpandError::other( return ExpandResult::only_err(ExpandError::RecursionOverflow);
"reached recursion limit during macro expansion",
));
} }
let ExpandResult { value, err } = op(self); let ExpandResult { value, err } = op(self);

View file

@ -447,18 +447,25 @@ fn select_best_path(
} }
const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc]; const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc];
let choose = |new_path: (ModPath, _), old_path: (ModPath, _)| { let choose = |new: (ModPath, _), old: (ModPath, _)| {
let new_has_prelude = new_path.0.segments().iter().any(|seg| seg == &known::prelude); let (new_path, _) = &new;
let old_has_prelude = old_path.0.segments().iter().any(|seg| seg == &known::prelude); let (old_path, _) = &old;
let new_has_prelude = new_path.segments().iter().any(|seg| seg == &known::prelude);
let old_has_prelude = old_path.segments().iter().any(|seg| seg == &known::prelude);
match (new_has_prelude, old_has_prelude, prefer_prelude) { match (new_has_prelude, old_has_prelude, prefer_prelude) {
(true, false, true) | (false, true, false) => new_path, (true, false, true) | (false, true, false) => new,
(true, false, false) | (false, true, true) => old_path, (true, false, false) | (false, true, true) => old,
// no prelude difference in the paths, so pick the smaller one // no prelude difference in the paths, so pick the shorter one
(true, true, _) | (false, false, _) => { (true, true, _) | (false, false, _) => {
if new_path.0.len() < old_path.0.len() { let new_path_is_shorter = new_path
new_path .len()
.cmp(&old_path.len())
.then_with(|| new_path.textual_len().cmp(&old_path.textual_len()))
.is_lt();
if new_path_is_shorter {
new
} else { } else {
old_path old
} }
} }
} }
@ -469,8 +476,8 @@ fn select_best_path(
let rank = match prefer_no_std { let rank = match prefer_no_std {
false => |name: &Name| match name { false => |name: &Name| match name {
name if name == &known::core => 0, name if name == &known::core => 0,
name if name == &known::alloc => 0, name if name == &known::alloc => 1,
name if name == &known::std => 1, name if name == &known::std => 2,
_ => unreachable!(), _ => unreachable!(),
}, },
true => |name: &Name| match name { true => |name: &Name| match name {
@ -1539,4 +1546,38 @@ pub mod foo {
"krate::prelude::Foo", "krate::prelude::Foo",
); );
} }
#[test]
fn respect_segment_length() {
check_found_path(
r#"
//- /main.rs crate:main deps:petgraph
$0
//- /petgraph.rs crate:petgraph
pub mod graph {
pub use crate::graph_impl::{
NodeIndex
};
}
mod graph_impl {
pub struct NodeIndex<Ix>(Ix);
}
pub mod stable_graph {
#[doc(no_inline)]
pub use crate::graph::{NodeIndex};
}
pub mod prelude {
#[doc(no_inline)]
pub use crate::graph::{NodeIndex};
}
"#,
"petgraph::graph::NodeIndex",
"petgraph::graph::NodeIndex",
"petgraph::graph::NodeIndex",
"petgraph::graph::NodeIndex",
);
}
} }

View file

@ -182,6 +182,7 @@ pub enum Expr {
tail: Option<ExprId>, tail: Option<ExprId>,
}, },
Const(ConstBlockId), Const(ConstBlockId),
// FIXME: Fold this into Block with an unsafe flag?
Unsafe { Unsafe {
id: Option<BlockId>, id: Option<BlockId>,
statements: Box<[Statement]>, statements: Box<[Statement]>,
@ -216,6 +217,9 @@ pub enum Expr {
Return { Return {
expr: Option<ExprId>, expr: Option<ExprId>,
}, },
Become {
expr: ExprId,
},
Yield { Yield {
expr: Option<ExprId>, expr: Option<ExprId>,
}, },
@ -349,6 +353,9 @@ pub enum Statement {
expr: ExprId, expr: ExprId,
has_semi: bool, has_semi: bool,
}, },
// At the moment, we only use this to figure out if a return expression
// is really the last statement of a block. See #16566
Item,
} }
impl Expr { impl Expr {
@ -382,6 +389,7 @@ impl Expr {
} }
} }
Statement::Expr { expr: expression, .. } => f(*expression), Statement::Expr { expr: expression, .. } => f(*expression),
Statement::Item => (),
} }
} }
if let &Some(expr) = tail { if let &Some(expr) = tail {
@ -410,6 +418,7 @@ impl Expr {
f(expr); f(expr);
} }
} }
Expr::Become { expr } => f(*expr),
Expr::RecordLit { fields, spread, .. } => { Expr::RecordLit { fields, spread, .. } => {
for field in fields.iter() { for field in fields.iter() {
f(field.expr); f(field.expr);

View file

@ -3,7 +3,7 @@
use std::{fmt, hash::BuildHasherDefault}; use std::{fmt, hash::BuildHasherDefault};
use base_db::CrateId; use base_db::CrateId;
use fst::{self, raw::IndexedValue, Automaton, Streamer}; use fst::{raw::IndexedValue, Automaton, Streamer};
use hir_expand::name::Name; use hir_expand::name::Name;
use indexmap::IndexMap; use indexmap::IndexMap;
use itertools::Itertools; use itertools::Itertools;

View file

@ -2,12 +2,12 @@
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
use hir_expand::{ast_id_map::AstIdMap, span_map::SpanMapRef, HirFileId}; use hir_expand::{ast_id_map::AstIdMap, span_map::SpanMapRef};
use syntax::ast::{self, HasModuleItem, HasTypeBounds, IsString}; use syntax::ast::{HasModuleItem, HasTypeBounds, IsString};
use crate::{ use crate::{
generics::{GenericParams, GenericParamsCollector, TypeParamData, TypeParamProvenance}, generics::{GenericParamsCollector, TypeParamData, TypeParamProvenance},
type_ref::{LifetimeRef, TraitBoundModifier, TraitRef}, type_ref::{LifetimeRef, TraitBoundModifier},
LocalLifetimeParamId, LocalTypeOrConstParamId, LocalLifetimeParamId, LocalTypeOrConstParamId,
}; };

View file

@ -1,13 +1,12 @@
//! `ItemTree` debug printer. //! `ItemTree` debug printer.
use std::fmt::{self, Write}; use std::fmt::Write;
use span::ErasedFileAstId; use span::ErasedFileAstId;
use crate::{ use crate::{
generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, generics::{WherePredicate, WherePredicateTypeTarget},
pretty::{print_path, print_type_bounds, print_type_ref}, pretty::{print_path, print_type_bounds, print_type_ref},
visibility::RawVisibility,
}; };
use super::*; use super::*;

View file

@ -33,7 +33,7 @@ m!(&k");
"#, "#,
expect![[r#" expect![[r#"
macro_rules! m { ($i:literal) => {}; } macro_rules! m { ($i:literal) => {}; }
/* error: invalid token tree */"#]], /* error: mismatched delimiters */"#]],
); );
} }

View file

@ -68,26 +68,26 @@ m2!();
"#, "#,
expect![[r#" expect![[r#"
macro_rules! i1 { invalid } macro_rules! i1 { invalid }
/* error: invalid macro definition: expected subtree */ /* error: macro definition has parse errors */
macro_rules! e1 { $i:ident => () } macro_rules! e1 { $i:ident => () }
/* error: invalid macro definition: expected subtree */ /* error: macro definition has parse errors */
macro_rules! e2 { ($i:ident) () } macro_rules! e2 { ($i:ident) () }
/* error: invalid macro definition: expected `=` */ /* error: macro definition has parse errors */
macro_rules! e3 { ($(i:ident)_) => () } macro_rules! e3 { ($(i:ident)_) => () }
/* error: invalid macro definition: invalid repeat */ /* error: macro definition has parse errors */
macro_rules! f1 { ($i) => ($i) } macro_rules! f1 { ($i) => ($i) }
/* error: invalid macro definition: missing fragment specifier */ /* error: macro definition has parse errors */
macro_rules! f2 { ($i:) => ($i) } macro_rules! f2 { ($i:) => ($i) }
/* error: invalid macro definition: missing fragment specifier */ /* error: macro definition has parse errors */
macro_rules! f3 { ($i:_) => () } macro_rules! f3 { ($i:_) => () }
/* error: invalid macro definition: missing fragment specifier */ /* error: macro definition has parse errors */
macro_rules! m1 { ($$i) => () } macro_rules! m1 { ($$i) => () }
/* error: invalid macro definition: `$$` is not allowed on the pattern side */ /* error: macro definition has parse errors */
macro_rules! m2 { () => ( ${invalid()} ) } macro_rules! m2 { () => ( ${invalid()} ) }
/* error: invalid macro definition: invalid metavariable expression */ /* error: macro definition has parse errors */
"#]], "#]],
) )
} }
@ -137,18 +137,18 @@ macro_rules! m9 { ($($($($i:ident)?)*)+) => {}; }
macro_rules! mA { ($($($($i:ident)+)?)*) => {}; } macro_rules! mA { ($($($($i:ident)+)?)*) => {}; }
macro_rules! mB { ($($($($i:ident)+)*)?) => {}; } macro_rules! mB { ($($($($i:ident)+)*)?) => {}; }
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
/* error: invalid macro definition: empty token tree in repetition */ /* error: macro definition has parse errors */
"#]], "#]],
); );
} }

View file

@ -275,9 +275,9 @@ macro_rules! depth_too_large {
} }
fn test() { fn test() {
/* error: invalid macro definition: invalid metavariable expression */; /* error: macro definition has parse errors */;
/* error: invalid macro definition: invalid metavariable expression */; /* error: macro definition has parse errors */;
/* error: invalid macro definition: invalid metavariable expression */; /* error: macro definition has parse errors */;
} }
"#]], "#]],
); );

View file

@ -1090,3 +1090,57 @@ fn main() {
"#]], "#]],
); );
} }
#[test]
fn regression_16529() {
check(
r#"
mod any {
#[macro_export]
macro_rules! nameable {
{
struct $name:ident[$a:lifetime]
} => {
$crate::any::nameable! {
struct $name[$a]
a
}
};
{
struct $name:ident[$a:lifetime]
a
} => {};
}
pub use nameable;
nameable! {
Name['a]
}
}
"#,
expect![[r#"
mod any {
#[macro_export]
macro_rules! nameable {
{
struct $name:ident[$a:lifetime]
} => {
$crate::any::nameable! {
struct $name[$a]
a
}
};
{
struct $name:ident[$a:lifetime]
a
} => {};
}
pub use nameable;
/* error: unexpected token in input */$crate::any::nameable! {
struct $name[$a]a
}
}
"#]],
);
}

View file

@ -97,8 +97,8 @@ m2!(x
macro_rules! m1 { ($x:ident) => { ($x } } macro_rules! m1 { ($x:ident) => { ($x } }
macro_rules! m2 { ($x:ident) => {} } macro_rules! m2 { ($x:ident) => {} }
/* error: invalid macro definition: expected subtree */ /* error: macro definition has parse errors */
/* error: invalid token tree */ /* error: mismatched delimiters */
"#]], "#]],
) )
} }

View file

@ -58,6 +58,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
name: "identity_when_valid".into(), name: "identity_when_valid".into(),
kind: ProcMacroKind::Attr, kind: ProcMacroKind::Attr,
expander: sync::Arc::new(IdentityWhenValidProcMacroExpander), expander: sync::Arc::new(IdentityWhenValidProcMacroExpander),
disabled: false,
}, },
)]; )];
let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros); let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros);

View file

@ -57,7 +57,7 @@ pub mod proc_macro;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
use std::{cmp::Ord, ops::Deref}; use std::ops::Deref;
use base_db::{CrateId, Edition, FileId}; use base_db::{CrateId, Edition, FileId};
use hir_expand::{ use hir_expand::{

View file

@ -11,7 +11,7 @@ use either::Either;
use hir_expand::{ use hir_expand::{
ast_id_map::FileAstId, ast_id_map::FileAstId,
attrs::{Attr, AttrId}, attrs::{Attr, AttrId},
builtin_attr_macro::find_builtin_attr, builtin_attr_macro::{find_builtin_attr, BuiltinAttrExpander},
builtin_derive_macro::find_builtin_derive, builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro, builtin_fn_macro::find_builtin_macro,
name::{name, AsName, Name}, name::{name, AsName, Name},
@ -98,9 +98,13 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
}; };
( (
name.as_name(), name.as_name(),
CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId( if it.disabled {
idx as u32, CustomProcMacroExpander::disabled()
)), } else {
CustomProcMacroExpander::new(
hir_expand::proc_macro::ProcMacroId::new(idx as u32),
)
},
) )
}) })
.collect()) .collect())
@ -604,9 +608,6 @@ impl DefCollector<'_> {
id: ItemTreeId<item_tree::Function>, id: ItemTreeId<item_tree::Function>,
fn_id: FunctionId, fn_id: FunctionId,
) { ) {
if self.def_map.block.is_some() {
return;
}
let kind = def.kind.to_basedb_kind(); let kind = def.kind.to_basedb_kind();
let (expander, kind) = let (expander, kind) =
match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) { match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) {
@ -1120,9 +1121,16 @@ impl DefCollector<'_> {
let mut push_resolved = |directive: &MacroDirective, call_id| { let mut push_resolved = |directive: &MacroDirective, call_id| {
resolved.push((directive.module_id, directive.depth, directive.container, call_id)); resolved.push((directive.module_id, directive.depth, directive.container, call_id));
}; };
#[derive(PartialEq, Eq)]
enum Resolved {
Yes,
No,
}
let mut res = ReachedFixedPoint::Yes; let mut res = ReachedFixedPoint::Yes;
// Retain unresolved macros after this round of resolution. // Retain unresolved macros after this round of resolution.
macros.retain(|directive| { let mut retain = |directive: &MacroDirective| {
let subns = match &directive.kind { let subns = match &directive.kind {
MacroDirectiveKind::FnLike { .. } => MacroSubNs::Bang, MacroDirectiveKind::FnLike { .. } => MacroSubNs::Bang,
MacroDirectiveKind::Attr { .. } | MacroDirectiveKind::Derive { .. } => { MacroDirectiveKind::Attr { .. } | MacroDirectiveKind::Derive { .. } => {
@ -1156,10 +1164,11 @@ impl DefCollector<'_> {
self.def_map.modules[directive.module_id] self.def_map.modules[directive.module_id]
.scope .scope
.add_macro_invoc(ast_id.ast_id, call_id); .add_macro_invoc(ast_id.ast_id, call_id);
push_resolved(directive, call_id); push_resolved(directive, call_id);
res = ReachedFixedPoint::No; res = ReachedFixedPoint::No;
return false; return Resolved::Yes;
} }
} }
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => { MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
@ -1198,7 +1207,7 @@ impl DefCollector<'_> {
push_resolved(directive, call_id); push_resolved(directive, call_id);
res = ReachedFixedPoint::No; res = ReachedFixedPoint::No;
return false; return Resolved::Yes;
} }
} }
MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, attr, tree } => { MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, attr, tree } => {
@ -1221,7 +1230,7 @@ impl DefCollector<'_> {
} }
.collect(&[*mod_item], directive.container); .collect(&[*mod_item], directive.container);
res = ReachedFixedPoint::No; res = ReachedFixedPoint::No;
false Resolved::Yes
}; };
if let Some(ident) = path.as_ident() { if let Some(ident) = path.as_ident() {
@ -1237,13 +1246,18 @@ impl DefCollector<'_> {
let def = match resolver_def_id(path.clone()) { let def = match resolver_def_id(path.clone()) {
Some(def) if def.is_attribute() => def, Some(def) if def.is_attribute() => def,
_ => return true, _ => return Resolved::No,
}; };
if matches!(
def, if let MacroDefId {
MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. } kind:
if expander.is_derive() MacroDefKind::BuiltInAttr(
) { BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst,
_,
),
..
} = def
{
// Resolved to `#[derive]`, we don't actually expand this attribute like // Resolved to `#[derive]`, we don't actually expand this attribute like
// normal (as that would just be an identity expansion with extra output) // normal (as that would just be an identity expansion with extra output)
// Instead we treat derive attributes special and apply them separately. // Instead we treat derive attributes special and apply them separately.
@ -1316,16 +1330,6 @@ impl DefCollector<'_> {
let call_id = let call_id =
attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def); attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def);
// If proc attribute macro expansion is disabled, skip expanding it here
if !self.db.expand_proc_attr_macros() {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
directive.module_id,
self.db.lookup_intern_macro_call(call_id).kind,
def.krate,
));
return recollect_without(self);
}
// Skip #[test]/#[bench] expansion, which would merely result in more memory usage // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
// due to duplicating functions into macro expansions // due to duplicating functions into macro expansions
if matches!( if matches!(
@ -1337,17 +1341,29 @@ impl DefCollector<'_> {
} }
if let MacroDefKind::ProcMacro(exp, ..) = def.kind { if let MacroDefKind::ProcMacro(exp, ..) = def.kind {
if exp.is_dummy() { // If proc attribute macro expansion is disabled, skip expanding it here
// If there's no expander for the proc macro (e.g. if !self.db.expand_proc_attr_macros() {
// because proc macros are disabled, or building the
// proc macro crate failed), report this and skip
// expansion like we would if it was disabled
self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
directive.module_id, directive.module_id,
self.db.lookup_intern_macro_call(call_id).kind, self.db.lookup_intern_macro_call(call_id).kind,
def.krate, def.krate,
)); ));
return recollect_without(self);
}
// If there's no expander for the proc macro (e.g.
// because proc macros are disabled, or building the
// proc macro crate failed), report this and skip
// expansion like we would if it was disabled
if exp.is_dummy() {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
directive.module_id,
self.db.lookup_intern_macro_call(call_id).kind,
def.krate,
));
return recollect_without(self);
}
if exp.is_disabled() {
return recollect_without(self); return recollect_without(self);
} }
} }
@ -1358,12 +1374,13 @@ impl DefCollector<'_> {
push_resolved(directive, call_id); push_resolved(directive, call_id);
res = ReachedFixedPoint::No; res = ReachedFixedPoint::No;
return false; return Resolved::Yes;
} }
} }
true Resolved::No
}); };
macros.retain(|it| retain(it) == Resolved::No);
// Attribute resolution can add unresolved macro invocations, so concatenate the lists. // Attribute resolution can add unresolved macro invocations, so concatenate the lists.
macros.extend(mem::take(&mut self.unresolved_macros)); macros.extend(mem::take(&mut self.unresolved_macros));
self.unresolved_macros = macros; self.unresolved_macros = macros;
@ -1673,7 +1690,11 @@ impl ModCollector<'_, '_> {
FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db); FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
if self.def_collector.is_proc_macro && self.module_id == DefMap::ROOT {
if self.def_collector.def_map.block.is_none()
&& self.def_collector.is_proc_macro
&& self.module_id == DefMap::ROOT
{
if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
self.def_collector.export_proc_macro( self.def_collector.export_proc_macro(
proc_macro, proc_macro,
@ -2333,7 +2354,7 @@ impl ModCollector<'_, '_> {
resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it)) resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it))
}, },
) { ) {
// FIXME: if there were errors, this mightve been in the eager expansion from an // FIXME: if there were errors, this might've been in the eager expansion from an
// unresolved macro, so we need to push this into late macro resolution. see fixme above // unresolved macro, so we need to push this into late macro resolution. see fixme above
if res.err.is_none() { if res.err.is_none() {
// Legacy macros need to be expanded immediately, so that any macros they produce // Legacy macros need to be expanded immediately, so that any macros they produce

View file

@ -103,6 +103,9 @@ impl DefDiagnostic {
} }
// FIXME: Whats the difference between this and unresolved_macro_call // FIXME: Whats the difference between this and unresolved_macro_call
// FIXME: This is used for a lot of things, unresolved proc macros, disabled proc macros, etc
// yet the diagnostic handler in ide-diagnostics has to figure out what happened because this
// struct loses all that information!
pub(crate) fn unresolved_proc_macro( pub(crate) fn unresolved_proc_macro(
container: LocalModuleId, container: LocalModuleId,
ast: MacroCallKind, ast: MacroCallKind,

View file

@ -446,7 +446,7 @@ fn compile_error_expand(
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees { let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
Some(unquoted) => ExpandError::other(unquoted), Some(unquoted) => ExpandError::other(unquoted.into_boxed_str()),
None => ExpandError::other("`compile_error!` argument must be a string"), None => ExpandError::other("`compile_error!` argument must be a string"),
}, },
_ => ExpandError::other("`compile_error!` argument must be a string"), _ => ExpandError::other("`compile_error!` argument must be a string"),

View file

@ -1,6 +1,10 @@
//! Defines a unit of change that can applied to the database to get the next //! Defines a unit of change that can applied to the database to get the next
//! state. Changes are transactional. //! state. Changes are transactional.
use base_db::{salsa::Durability, CrateGraph, FileChange, SourceDatabaseExt, SourceRoot}; use base_db::{
salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot,
TargetLayoutLoadResult, Version,
};
use la_arena::RawIdx;
use span::FileId; use span::FileId;
use triomphe::Arc; use triomphe::Arc;
@ -10,6 +14,8 @@ use crate::{db::ExpandDatabase, proc_macro::ProcMacros};
pub struct Change { pub struct Change {
pub source_change: FileChange, pub source_change: FileChange,
pub proc_macros: Option<ProcMacros>, pub proc_macros: Option<ProcMacros>,
pub toolchains: Option<Vec<Option<Version>>>,
pub target_data_layouts: Option<Vec<TargetLayoutLoadResult>>,
} }
impl Change { impl Change {
@ -22,6 +28,24 @@ impl Change {
if let Some(proc_macros) = self.proc_macros { if let Some(proc_macros) = self.proc_macros {
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
} }
if let Some(target_data_layouts) = self.target_data_layouts {
for (id, val) in target_data_layouts.into_iter().enumerate() {
db.set_data_layout_with_durability(
CrateId::from_raw(RawIdx::from(id as u32)),
val,
Durability::HIGH,
);
}
}
if let Some(toolchains) = self.toolchains {
for (id, val) in toolchains.into_iter().enumerate() {
db.set_toolchain_with_durability(
CrateId::from_raw(RawIdx::from(id as u32)),
val,
Durability::HIGH,
);
}
}
} }
pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) { pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) {
@ -36,6 +60,14 @@ impl Change {
self.proc_macros = Some(proc_macros); self.proc_macros = Some(proc_macros);
} }
pub fn set_toolchains(&mut self, toolchains: Vec<Option<Version>>) {
self.toolchains = Some(toolchains);
}
pub fn set_target_data_layouts(&mut self, target_data_layouts: Vec<TargetLayoutLoadResult>) {
self.target_data_layouts = Some(target_data_layouts);
}
pub fn set_roots(&mut self, roots: Vec<SourceRoot>) { pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
self.source_change.set_roots(roots) self.source_change.set_roots(roots)
} }

View file

@ -108,7 +108,7 @@ pub trait ExpandDatabase: SourceDatabase {
fn macro_arg( fn macro_arg(
&self, &self,
id: MacroCallId, id: MacroCallId,
) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>; ) -> ValueResult<(Arc<tt::Subtree>, SyntaxFixupUndoInfo), Arc<Box<[SyntaxError]>>>;
/// Fetches the expander for this macro. /// Fetches the expander for this macro.
#[salsa::transparent] #[salsa::transparent]
#[salsa::invoke(TokenExpander::macro_expander)] #[salsa::invoke(TokenExpander::macro_expander)]
@ -326,58 +326,77 @@ fn macro_arg(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
// FIXME: consider the following by putting fixup info into eager call info args // FIXME: consider the following by putting fixup info into eager call info args
// ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> { // ) -> ValueResult<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> { ) -> ValueResult<(Arc<tt::Subtree>, SyntaxFixupUndoInfo), Arc<Box<[SyntaxError]>>> {
let mismatched_delimiters = |arg: &SyntaxNode| {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
Some(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)]) as Box<[_]>))
} else {
None
}
};
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
.then(|| loc.eager.as_deref()) .then(|| loc.eager.as_deref())
.flatten() .flatten()
{ {
ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE))) ValueResult::ok((arg.clone(), SyntaxFixupUndoInfo::NONE))
} else { } else {
let (parse, map) = parse_with_map(db, loc.kind.file_id()); let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node(); let root = parse.syntax_node();
let syntax = match loc.kind { let syntax = match loc.kind {
MacroCallKind::FnLike { ast_id, .. } => { MacroCallKind::FnLike { ast_id, .. } => {
let dummy_tt = |kind| {
(
Arc::new(tt::Subtree {
delimiter: tt::Delimiter {
open: loc.call_site,
close: loc.call_site,
kind,
},
token_trees: Box::default(),
}),
SyntaxFixupUndoInfo::default(),
)
};
let node = &ast_id.to_ptr(db).to_node(&root); let node = &ast_id.to_ptr(db).to_node(&root);
let offset = node.syntax().text_range().start(); let offset = node.syntax().text_range().start();
match node.token_tree() { let Some(tt) = node.token_tree() else {
Some(tt) => { return ValueResult::new(
let tt = tt.syntax(); dummy_tt(tt::DelimiterKind::Invisible),
if let Some(e) = mismatched_delimiters(tt) { Arc::new(Box::new([SyntaxError::new_at_offset(
return ValueResult::only_err(e); "missing token tree".to_owned(),
} offset,
tt.clone() )])),
} );
None => { };
return ValueResult::only_err(Arc::new(Box::new([ let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']);
SyntaxError::new_at_offset("missing token tree".to_owned(), offset), let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]);
])));
} let mismatched_delimiters = !matches!(
(first, last),
(T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])
);
if mismatched_delimiters {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// So instead, we'll return an empty subtree here
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
let kind = match first {
_ if loc.def.is_proc_macro() => tt::DelimiterKind::Invisible,
T!['('] => tt::DelimiterKind::Parenthesis,
T!['['] => tt::DelimiterKind::Bracket,
T!['{'] => tt::DelimiterKind::Brace,
_ => tt::DelimiterKind::Invisible,
};
return ValueResult::new(
dummy_tt(kind),
Arc::new(Box::new([SyntaxError::new_at_offset(
"mismatched delimiters".to_owned(),
offset,
)])),
);
} }
tt.syntax().clone()
} }
MacroCallKind::Derive { ast_id, .. } => { MacroCallKind::Derive { ast_id, .. } => {
ast_id.to_ptr(db).to_node(&root).syntax().clone() ast_id.to_ptr(db).to_node(&root).syntax().clone()
@ -427,15 +446,15 @@ fn macro_arg(
if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
match parse.errors() { match parse.errors() {
[] => ValueResult::ok(Some((Arc::new(tt), undo_info))), [] => ValueResult::ok((Arc::new(tt), undo_info)),
errors => ValueResult::new( errors => ValueResult::new(
Some((Arc::new(tt), undo_info)), (Arc::new(tt), undo_info),
// Box::<[_]>::from(res.errors()), not stable yet // Box::<[_]>::from(res.errors()), not stable yet
Arc::new(errors.to_vec().into_boxed_slice()), Arc::new(errors.to_vec().into_boxed_slice()),
), ),
} }
} else { } else {
ValueResult::ok(Some((Arc::new(tt), undo_info))) ValueResult::ok((Arc::new(tt), undo_info))
} }
} }
} }
@ -519,21 +538,20 @@ fn macro_expand(
expander.expand(db, macro_call_id, &node, map.as_ref()) expander.expand(db, macro_call_id, &node, map.as_ref())
} }
_ => { _ => {
let ValueResult { value, err } = db.macro_arg(macro_call_id); let ValueResult { value: (macro_arg, undo_info), err } = db.macro_arg(macro_call_id);
let Some((macro_arg, undo_info)) = value else { let format_parse_err = |err: Arc<Box<[SyntaxError]>>| {
return ExpandResult { let mut buf = String::new();
value: CowArc::Owned(tt::Subtree { for err in &**err {
delimiter: tt::Delimiter::invisible_spanned(loc.call_site), use std::fmt::Write;
token_trees: Box::new([]), _ = write!(buf, "{}, ", err);
}), }
// FIXME: We should make sure to enforce an invariant that invalid macro buf.pop();
// calls do not reach this call path! buf.pop();
err: Some(ExpandError::other("invalid token tree")), ExpandError::other(buf)
};
}; };
let arg = &*macro_arg; let arg = &*macro_arg;
match loc.def.kind { let res = match loc.def.kind {
MacroDefKind::Declarative(id) => { MacroDefKind::Declarative(id) => {
db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id) db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
} }
@ -549,16 +567,7 @@ fn macro_expand(
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => { MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
return ExpandResult { return ExpandResult {
value: CowArc::Arc(macro_arg.clone()), value: CowArc::Arc(macro_arg.clone()),
err: err.map(|err| { err: err.map(format_parse_err),
let mut buf = String::new();
for err in &**err {
use std::fmt::Write;
_ = write!(buf, "{}, ", err);
}
buf.pop();
buf.pop();
ExpandError::other(buf)
}),
}; };
} }
MacroDefKind::BuiltInEager(it, _) => { MacroDefKind::BuiltInEager(it, _) => {
@ -570,6 +579,11 @@ fn macro_expand(
res res
} }
_ => unreachable!(), _ => unreachable!(),
};
ExpandResult {
value: res.value,
// if the arg had parse errors, show them instead of the expansion errors
err: err.map(format_parse_err).or(res.err),
} }
} }
}; };
@ -597,17 +611,7 @@ fn macro_expand(
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> { fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
let Some((macro_arg, undo_info)) = db.macro_arg(id).value else { let (macro_arg, undo_info) = db.macro_arg(id).value;
return ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
token_trees: Box::new([]),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
// calls do not reach this call path!
err: Some(ExpandError::other("invalid token tree")),
};
};
let expander = match loc.def.kind { let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander, MacroDefKind::ProcMacro(expander, ..) => expander,

View file

@ -31,7 +31,7 @@ impl DeclarativeMacroExpander {
call_id: MacroCallId, call_id: MacroCallId,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(call_id); let loc = db.lookup_intern_macro_call(call_id);
let toolchain = &db.crate_graph()[loc.def.krate].toolchain; let toolchain = db.toolchain(loc.def.krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| { let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version { &base_db::Version {
@ -44,9 +44,9 @@ impl DeclarativeMacroExpander {
) )
}); });
match self.mac.err() { match self.mac.err() {
Some(e) => ExpandResult::new( Some(_) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }), tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
ExpandError::other(format!("invalid macro definition: {e}")), ExpandError::MacroDefinition,
), ),
None => self None => self
.mac .mac
@ -67,7 +67,7 @@ impl DeclarativeMacroExpander {
krate: CrateId, krate: CrateId,
call_site: Span, call_site: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let toolchain = &db.crate_graph()[krate].toolchain; let toolchain = db.toolchain(krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| { let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version { &base_db::Version {
@ -80,9 +80,9 @@ impl DeclarativeMacroExpander {
) )
}); });
match self.mac.err() { match self.mac.err() {
Some(e) => ExpandResult::new( Some(_) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other(format!("invalid macro definition: {e}")), ExpandError::MacroDefinition,
), ),
None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into), None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
} }
@ -119,7 +119,7 @@ impl DeclarativeMacroExpander {
_ => None, _ => None,
} }
}; };
let toolchain = crate_data.toolchain.as_ref(); let toolchain = db.toolchain(def_crate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| { let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version { &base_db::Version {

View file

@ -44,7 +44,6 @@ use crate::{
builtin_derive_macro::BuiltinDeriveExpander, builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::{ExpandDatabase, TokenExpander}, db::{ExpandDatabase, TokenExpander},
fixup::SyntaxFixupUndoInfo,
hygiene::SyntaxContextData, hygiene::SyntaxContextData,
mod_path::ModPath, mod_path::ModPath,
proc_macro::{CustomProcMacroExpander, ProcMacroKind}, proc_macro::{CustomProcMacroExpander, ProcMacroKind},
@ -129,8 +128,11 @@ pub type ExpandResult<T> = ValueResult<T, ExpandError>;
#[derive(Debug, PartialEq, Eq, Clone, Hash)] #[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandError { pub enum ExpandError {
UnresolvedProcMacro(CrateId), UnresolvedProcMacro(CrateId),
/// The macro expansion is disabled.
MacroDisabled,
MacroDefinition,
Mbe(mbe::ExpandError), Mbe(mbe::ExpandError),
RecursionOverflowPoisoned, RecursionOverflow,
Other(Box<Box<str>>), Other(Box<Box<str>>),
ProcMacroPanic(Box<Box<str>>), ProcMacroPanic(Box<Box<str>>),
} }
@ -152,14 +154,14 @@ impl fmt::Display for ExpandError {
match self { match self {
ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"), ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"),
ExpandError::Mbe(it) => it.fmt(f), ExpandError::Mbe(it) => it.fmt(f),
ExpandError::RecursionOverflowPoisoned => { ExpandError::RecursionOverflow => f.write_str("overflow expanding the original macro"),
f.write_str("overflow expanding the original macro")
}
ExpandError::ProcMacroPanic(it) => { ExpandError::ProcMacroPanic(it) => {
f.write_str("proc-macro panicked: ")?; f.write_str("proc-macro panicked: ")?;
f.write_str(it) f.write_str(it)
} }
ExpandError::Other(it) => f.write_str(it), ExpandError::Other(it) => f.write_str(it),
ExpandError::MacroDisabled => f.write_str("macro disabled"),
ExpandError::MacroDefinition => f.write_str("macro definition has parse errors"),
} }
} }
} }
@ -225,8 +227,8 @@ pub enum MacroCallKind {
}, },
Attr { Attr {
ast_id: AstId<ast::Item>, ast_id: AstId<ast::Item>,
// FIXME: This is being interned, subtrees can vary quickly differ just slightly causing // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`
// leakage problems here // but we need to fix the `cfg_attr` handling first.
attr_args: Option<Arc<tt::Subtree>>, attr_args: Option<Arc<tt::Subtree>>,
/// Syntactical index of the invoking `#[attribute]`. /// Syntactical index of the invoking `#[attribute]`.
/// ///
@ -758,15 +760,7 @@ impl ExpansionInfo {
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value;
(
Arc::new(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
token_trees: Box::new([]),
}),
SyntaxFixupUndoInfo::NONE,
)
});
let def = loc.def.ast_id().left().and_then(|id| { let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) { let def_tt = match id.to_node(db) {

View file

@ -94,6 +94,21 @@ impl ModPath {
} }
} }
pub fn textual_len(&self) -> usize {
let base = match self.kind {
PathKind::Plain => 0,
PathKind::Super(0) => "self".len(),
PathKind::Super(i) => "super".len() * i as usize,
PathKind::Crate => "crate".len(),
PathKind::Abs => 0,
PathKind::DollarCrate(_) => "$crate".len(),
};
self.segments()
.iter()
.map(|segment| segment.as_str().map_or(0, str::len))
.fold(base, core::ops::Add::add)
}
pub fn is_ident(&self) -> bool { pub fn is_ident(&self) -> bool {
self.as_ident().is_some() self.as_ident().is_some()
} }

View file

@ -12,7 +12,13 @@ use syntax::SmolStr;
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32); pub struct ProcMacroId(u32);
impl ProcMacroId {
pub fn new(u32: u32) -> Self {
ProcMacroId(u32)
}
}
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub enum ProcMacroKind { pub enum ProcMacroKind {
@ -49,6 +55,7 @@ pub struct ProcMacro {
pub name: SmolStr, pub name: SmolStr,
pub kind: ProcMacroKind, pub kind: ProcMacroKind,
pub expander: sync::Arc<dyn ProcMacroExpander>, pub expander: sync::Arc<dyn ProcMacroExpander>,
pub disabled: bool,
} }
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
@ -56,20 +63,35 @@ pub struct CustomProcMacroExpander {
proc_macro_id: ProcMacroId, proc_macro_id: ProcMacroId,
} }
const DUMMY_ID: u32 = !0;
impl CustomProcMacroExpander { impl CustomProcMacroExpander {
const DUMMY_ID: u32 = !0;
const DISABLED_ID: u32 = !1;
pub fn new(proc_macro_id: ProcMacroId) -> Self { pub fn new(proc_macro_id: ProcMacroId) -> Self {
assert_ne!(proc_macro_id.0, DUMMY_ID); assert_ne!(proc_macro_id.0, Self::DUMMY_ID);
assert_ne!(proc_macro_id.0, Self::DISABLED_ID);
Self { proc_macro_id } Self { proc_macro_id }
} }
pub fn dummy() -> Self { /// A dummy expander that always errors. This is used for proc-macros that are missing, usually
Self { proc_macro_id: ProcMacroId(DUMMY_ID) } /// due to them not being built yet.
pub const fn dummy() -> Self {
Self { proc_macro_id: ProcMacroId(Self::DUMMY_ID) }
} }
pub fn is_dummy(&self) -> bool { /// The macro was not yet resolved.
self.proc_macro_id.0 == DUMMY_ID pub const fn is_dummy(&self) -> bool {
self.proc_macro_id.0 == Self::DUMMY_ID
}
/// A dummy expander that always errors. This expander is used for macros that have been disabled.
pub const fn disabled() -> Self {
Self { proc_macro_id: ProcMacroId(Self::DISABLED_ID) }
}
/// The macro is explicitly disabled and cannot be expanded.
pub const fn is_disabled(&self) -> bool {
self.proc_macro_id.0 == Self::DISABLED_ID
} }
pub fn expand( pub fn expand(
@ -84,10 +106,14 @@ impl CustomProcMacroExpander {
mixed_site: Span, mixed_site: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
match self.proc_macro_id { match self.proc_macro_id {
ProcMacroId(DUMMY_ID) => ExpandResult::new( ProcMacroId(Self::DUMMY_ID) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::UnresolvedProcMacro(def_crate), ExpandError::UnresolvedProcMacro(def_crate),
), ),
ProcMacroId(Self::DISABLED_ID) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::MacroDisabled,
),
ProcMacroId(id) => { ProcMacroId(id) => {
let proc_macros = db.proc_macros(); let proc_macros = db.proc_macros();
let proc_macros = match proc_macros.get(&def_crate) { let proc_macros = match proc_macros.get(&def_crate) {
@ -110,7 +136,7 @@ impl CustomProcMacroExpander {
); );
return ExpandResult::new( return ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"), ExpandError::other("Internal error: proc-macro index out of bounds"),
); );
} }
}; };

View file

@ -169,9 +169,9 @@ impl ExprValidator {
return; return;
} }
let pattern_arena = Arena::new(); let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db);
let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db, &pattern_arena);
let pattern_arena = Arena::new();
let mut m_arms = Vec::with_capacity(arms.len()); let mut m_arms = Vec::with_capacity(arms.len());
let mut has_lowering_errors = false; let mut has_lowering_errors = false;
for arm in arms { for arm in arms {
@ -196,8 +196,9 @@ impl ExprValidator {
// If we had a NotUsefulMatchArm diagnostic, we could // If we had a NotUsefulMatchArm diagnostic, we could
// check the usefulness of each pattern as we added it // check the usefulness of each pattern as we added it
// to the matrix here. // to the matrix here.
let pat = self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors);
let m_arm = pat_analysis::MatchArm { let m_arm = pat_analysis::MatchArm {
pat: self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors), pat: pattern_arena.alloc(pat),
has_guard: arm.guard.is_some(), has_guard: arm.guard.is_some(),
arm_data: (), arm_data: (),
}; };
@ -223,7 +224,7 @@ impl ExprValidator {
ValidityConstraint::ValidOnly, ValidityConstraint::ValidOnly,
) { ) {
Ok(report) => report, Ok(report) => report,
Err(void) => match void {}, Err(()) => return,
}; };
// FIXME Report unreachable arms // FIXME Report unreachable arms
@ -245,10 +246,10 @@ impl ExprValidator {
db: &dyn HirDatabase, db: &dyn HirDatabase,
body: &Body, body: &Body,
have_errors: &mut bool, have_errors: &mut bool,
) -> &'p DeconstructedPat<'p> { ) -> DeconstructedPat<'p> {
let mut patcx = match_check::PatCtxt::new(db, &self.infer, body); let mut patcx = match_check::PatCtxt::new(db, &self.infer, body);
let pattern = patcx.lower_pattern(pat); let pattern = patcx.lower_pattern(pat);
let pattern = cx.pattern_arena.alloc(cx.lower_pat(&pattern)); let pattern = cx.lower_pat(&pattern);
if !patcx.errors.is_empty() { if !patcx.errors.is_empty() {
*have_errors = true; *have_errors = true;
} }

View file

@ -1,6 +1,7 @@
//! Interface with `rustc_pattern_analysis`. //! Interface with `rustc_pattern_analysis`.
use std::fmt; use std::fmt;
use tracing::debug;
use hir_def::{DefWithBodyId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; use hir_def::{DefWithBodyId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
@ -11,7 +12,6 @@ use rustc_pattern_analysis::{
}; };
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use stdx::never; use stdx::never;
use typed_arena::Arena;
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
@ -26,7 +26,7 @@ use Constructor::*;
// Re-export r-a-specific versions of all these types. // Re-export r-a-specific versions of all these types.
pub(crate) type DeconstructedPat<'p> = pub(crate) type DeconstructedPat<'p> =
rustc_pattern_analysis::pat::DeconstructedPat<'p, MatchCheckCtx<'p>>; rustc_pattern_analysis::pat::DeconstructedPat<MatchCheckCtx<'p>>;
pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>; pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>;
pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat<MatchCheckCtx<'p>>; pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat<MatchCheckCtx<'p>>;
@ -40,7 +40,6 @@ pub(crate) struct MatchCheckCtx<'p> {
module: ModuleId, module: ModuleId,
body: DefWithBodyId, body: DefWithBodyId,
pub(crate) db: &'p dyn HirDatabase, pub(crate) db: &'p dyn HirDatabase,
pub(crate) pattern_arena: &'p Arena<DeconstructedPat<'p>>,
exhaustive_patterns: bool, exhaustive_patterns: bool,
min_exhaustive_patterns: bool, min_exhaustive_patterns: bool,
} }
@ -52,17 +51,12 @@ pub(crate) struct PatData<'p> {
} }
impl<'p> MatchCheckCtx<'p> { impl<'p> MatchCheckCtx<'p> {
pub(crate) fn new( pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'p dyn HirDatabase) -> Self {
module: ModuleId,
body: DefWithBodyId,
db: &'p dyn HirDatabase,
pattern_arena: &'p Arena<DeconstructedPat<'p>>,
) -> Self {
let def_map = db.crate_def_map(module.krate()); let def_map = db.crate_def_map(module.krate());
let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns"); let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns");
let min_exhaustive_patterns = let min_exhaustive_patterns =
def_map.is_unstable_feature_enabled("min_exhaustive_patterns"); def_map.is_unstable_feature_enabled("min_exhaustive_patterns");
Self { module, body, db, pattern_arena, exhaustive_patterns, min_exhaustive_patterns } Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns }
} }
fn is_uninhabited(&self, ty: &Ty) -> bool { fn is_uninhabited(&self, ty: &Ty) -> bool {
@ -131,15 +125,15 @@ impl<'p> MatchCheckCtx<'p> {
} }
pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> { pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> {
let singleton = |pat| std::slice::from_ref(self.pattern_arena.alloc(pat)); let singleton = |pat| vec![pat];
let ctor; let ctor;
let fields: &[_]; let fields: Vec<_>;
match pat.kind.as_ref() { match pat.kind.as_ref() {
PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat), PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat),
PatKind::Binding { subpattern: None, .. } | PatKind::Wild => { PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
ctor = Wildcard; ctor = Wildcard;
fields = &[]; fields = Vec::new();
} }
PatKind::Deref { subpattern } => { PatKind::Deref { subpattern } => {
ctor = match pat.ty.kind(Interner) { ctor = match pat.ty.kind(Interner) {
@ -157,7 +151,7 @@ impl<'p> MatchCheckCtx<'p> {
match pat.ty.kind(Interner) { match pat.ty.kind(Interner) {
TyKind::Tuple(_, substs) => { TyKind::Tuple(_, substs) => {
ctor = Struct; ctor = Struct;
let mut wilds: SmallVec<[_; 2]> = substs let mut wilds: Vec<_> = substs
.iter(Interner) .iter(Interner)
.map(|arg| arg.assert_ty_ref(Interner).clone()) .map(|arg| arg.assert_ty_ref(Interner).clone())
.map(DeconstructedPat::wildcard) .map(DeconstructedPat::wildcard)
@ -166,7 +160,7 @@ impl<'p> MatchCheckCtx<'p> {
let idx: u32 = pat.field.into_raw().into(); let idx: u32 = pat.field.into_raw().into();
wilds[idx as usize] = self.lower_pat(&pat.pattern); wilds[idx as usize] = self.lower_pat(&pat.pattern);
} }
fields = self.pattern_arena.alloc_extend(wilds) fields = wilds
} }
TyKind::Adt(adt, substs) if is_box(self.db, adt.0) => { TyKind::Adt(adt, substs) if is_box(self.db, adt.0) => {
// The only legal patterns of type `Box` (outside `std`) are `_` and box // The only legal patterns of type `Box` (outside `std`) are `_` and box
@ -216,33 +210,29 @@ impl<'p> MatchCheckCtx<'p> {
field_id_to_id[field_idx as usize] = Some(i); field_id_to_id[field_idx as usize] = Some(i);
ty ty
}); });
let mut wilds: SmallVec<[_; 2]> = let mut wilds: Vec<_> = tys.map(DeconstructedPat::wildcard).collect();
tys.map(DeconstructedPat::wildcard).collect();
for pat in subpatterns { for pat in subpatterns {
let field_idx: u32 = pat.field.into_raw().into(); let field_idx: u32 = pat.field.into_raw().into();
if let Some(i) = field_id_to_id[field_idx as usize] { if let Some(i) = field_id_to_id[field_idx as usize] {
wilds[i] = self.lower_pat(&pat.pattern); wilds[i] = self.lower_pat(&pat.pattern);
} }
} }
fields = self.pattern_arena.alloc_extend(wilds); fields = wilds;
} }
_ => { _ => {
never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty); never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
ctor = Wildcard; ctor = Wildcard;
fields = &[]; fields = Vec::new();
} }
} }
} }
&PatKind::LiteralBool { value } => { &PatKind::LiteralBool { value } => {
ctor = Bool(value); ctor = Bool(value);
fields = &[]; fields = Vec::new();
} }
PatKind::Or { pats } => { PatKind::Or { pats } => {
ctor = Or; ctor = Or;
// Collect here because `Arena::alloc_extend` panics on reentrancy. fields = pats.iter().map(|pat| self.lower_pat(pat)).collect();
let subpats: SmallVec<[_; 2]> =
pats.iter().map(|pat| self.lower_pat(pat)).collect();
fields = self.pattern_arena.alloc_extend(subpats);
} }
} }
let data = PatData { db: self.db }; let data = PatData { db: self.db };
@ -307,7 +297,7 @@ impl<'p> MatchCheckCtx<'p> {
} }
impl<'p> TypeCx for MatchCheckCtx<'p> { impl<'p> TypeCx for MatchCheckCtx<'p> {
type Error = Void; type Error = ();
type Ty = Ty; type Ty = Ty;
type VariantIdx = EnumVariantId; type VariantIdx = EnumVariantId;
type StrLit = Void; type StrLit = Void;
@ -463,7 +453,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
fn write_variant_name( fn write_variant_name(
f: &mut fmt::Formatter<'_>, f: &mut fmt::Formatter<'_>,
pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>, pat: &rustc_pattern_analysis::pat::DeconstructedPat<Self>,
) -> fmt::Result { ) -> fmt::Result {
let variant = let variant =
pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt)); pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt));
@ -485,8 +475,8 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
Ok(()) Ok(())
} }
fn bug(&self, fmt: fmt::Arguments<'_>) -> ! { fn bug(&self, fmt: fmt::Arguments<'_>) {
panic!("{}", fmt) debug!("{}", fmt)
} }
} }

View file

@ -2,7 +2,7 @@
//! //!
//! Originates from `rustc_hir::pat_util` //! Originates from `rustc_hir::pat_util`
use std::iter::{Enumerate, ExactSizeIterator}; use std::iter::Enumerate;
pub(crate) struct EnumerateAndAdjust<I> { pub(crate) struct EnumerateAndAdjust<I> {
enumerate: Enumerate<I>, enumerate: Enumerate<I>,

View file

@ -26,7 +26,7 @@ use std::{convert::identity, ops::Index};
use chalk_ir::{ use chalk_ir::{
cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety, cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety,
Scalar, TyKind, TypeFlags, Scalar, TyKind, TypeFlags, Variance,
}; };
use either::Either; use either::Either;
use hir_def::{ use hir_def::{
@ -58,8 +58,9 @@ use crate::{
static_lifetime, to_assoc_type_id, static_lifetime, to_assoc_type_id,
traits::FnTrait, traits::FnTrait,
utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder}, utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
AliasEq, AliasTy, ClosureId, DomainGoal, GenericArg, Goal, ImplTraitId, InEnvironment, AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId,
Interner, ProjectionTy, RpitId, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, InEnvironment, Interner, Lifetime, ProjectionTy, RpitId, Substitution, TraitEnvironment,
TraitRef, Ty, TyBuilder, TyExt,
}; };
// This lint has a false positive here. See the link below for details. // This lint has a false positive here. See the link below for details.
@ -68,7 +69,7 @@ use crate::{
#[allow(unreachable_pub)] #[allow(unreachable_pub)]
pub use coerce::could_coerce; pub use coerce::could_coerce;
#[allow(unreachable_pub)] #[allow(unreachable_pub)]
pub use unify::could_unify; pub use unify::{could_unify, could_unify_deeply};
use cast::CastCheck; use cast::CastCheck;
pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
@ -688,10 +689,17 @@ impl<'a> InferenceContext<'a> {
for ty in type_of_for_iterator.values_mut() { for ty in type_of_for_iterator.values_mut() {
*ty = table.resolve_completely(ty.clone()); *ty = table.resolve_completely(ty.clone());
} }
for mismatch in type_mismatches.values_mut() { type_mismatches.retain(|_, mismatch| {
mismatch.expected = table.resolve_completely(mismatch.expected.clone()); mismatch.expected = table.resolve_completely(mismatch.expected.clone());
mismatch.actual = table.resolve_completely(mismatch.actual.clone()); mismatch.actual = table.resolve_completely(mismatch.actual.clone());
} chalk_ir::zip::Zip::zip_with(
&mut UnknownMismatch(self.db),
Variance::Invariant,
&mismatch.expected,
&mismatch.actual,
)
.is_ok()
});
diagnostics.retain_mut(|diagnostic| { diagnostics.retain_mut(|diagnostic| {
use InferenceDiagnostic::*; use InferenceDiagnostic::*;
match diagnostic { match diagnostic {
@ -1502,3 +1510,116 @@ impl std::ops::BitOrAssign for Diverges {
*self = *self | other; *self = *self | other;
} }
} }
/// A zipper that checks for unequal `{unknown}` occurrences in the two types. Used to filter out
/// mismatch diagnostics that only differ in `{unknown}`. These mismatches are usually not helpful.
/// As the cause is usually an underlying name resolution problem.
struct UnknownMismatch<'db>(&'db dyn HirDatabase);
impl chalk_ir::zip::Zipper<Interner> for UnknownMismatch<'_> {
fn zip_tys(&mut self, variance: Variance, a: &Ty, b: &Ty) -> chalk_ir::Fallible<()> {
let zip_substs = |this: &mut Self,
variances,
sub_a: &Substitution,
sub_b: &Substitution| {
this.zip_substs(variance, variances, sub_a.as_slice(Interner), sub_b.as_slice(Interner))
};
match (a.kind(Interner), b.kind(Interner)) {
(TyKind::Adt(id_a, sub_a), TyKind::Adt(id_b, sub_b)) if id_a == id_b => zip_substs(
self,
Some(self.unification_database().adt_variance(*id_a)),
sub_a,
sub_b,
)?,
(
TyKind::AssociatedType(assoc_ty_a, sub_a),
TyKind::AssociatedType(assoc_ty_b, sub_b),
) if assoc_ty_a == assoc_ty_b => zip_substs(self, None, sub_a, sub_b)?,
(TyKind::Tuple(arity_a, sub_a), TyKind::Tuple(arity_b, sub_b))
if arity_a == arity_b =>
{
zip_substs(self, None, sub_a, sub_b)?
}
(TyKind::OpaqueType(opaque_ty_a, sub_a), TyKind::OpaqueType(opaque_ty_b, sub_b))
if opaque_ty_a == opaque_ty_b =>
{
zip_substs(self, None, sub_a, sub_b)?
}
(TyKind::Slice(ty_a), TyKind::Slice(ty_b)) => self.zip_tys(variance, ty_a, ty_b)?,
(TyKind::FnDef(fn_def_a, sub_a), TyKind::FnDef(fn_def_b, sub_b))
if fn_def_a == fn_def_b =>
{
zip_substs(
self,
Some(self.unification_database().fn_def_variance(*fn_def_a)),
sub_a,
sub_b,
)?
}
(TyKind::Ref(mutability_a, _, ty_a), TyKind::Ref(mutability_b, _, ty_b))
if mutability_a == mutability_b =>
{
self.zip_tys(variance, ty_a, ty_b)?
}
(TyKind::Raw(mutability_a, ty_a), TyKind::Raw(mutability_b, ty_b))
if mutability_a == mutability_b =>
{
self.zip_tys(variance, ty_a, ty_b)?
}
(TyKind::Array(ty_a, const_a), TyKind::Array(ty_b, const_b)) if const_a == const_b => {
self.zip_tys(variance, ty_a, ty_b)?
}
(TyKind::Closure(id_a, sub_a), TyKind::Closure(id_b, sub_b)) if id_a == id_b => {
zip_substs(self, None, sub_a, sub_b)?
}
(TyKind::Coroutine(coroutine_a, sub_a), TyKind::Coroutine(coroutine_b, sub_b))
if coroutine_a == coroutine_b =>
{
zip_substs(self, None, sub_a, sub_b)?
}
(
TyKind::CoroutineWitness(coroutine_a, sub_a),
TyKind::CoroutineWitness(coroutine_b, sub_b),
) if coroutine_a == coroutine_b => zip_substs(self, None, sub_a, sub_b)?,
(TyKind::Function(fn_ptr_a), TyKind::Function(fn_ptr_b))
if fn_ptr_a.sig == fn_ptr_b.sig && fn_ptr_a.num_binders == fn_ptr_b.num_binders =>
{
zip_substs(self, None, &fn_ptr_a.substitution.0, &fn_ptr_b.substitution.0)?
}
(TyKind::Error, TyKind::Error) => (),
(TyKind::Error, _) | (_, TyKind::Error) => return Err(chalk_ir::NoSolution),
_ => (),
}
Ok(())
}
fn zip_lifetimes(&mut self, _: Variance, _: &Lifetime, _: &Lifetime) -> chalk_ir::Fallible<()> {
Ok(())
}
fn zip_consts(&mut self, _: Variance, _: &Const, _: &Const) -> chalk_ir::Fallible<()> {
Ok(())
}
fn zip_binders<T>(
&mut self,
variance: Variance,
a: &Binders<T>,
b: &Binders<T>,
) -> chalk_ir::Fallible<()>
where
T: Clone
+ HasInterner<Interner = Interner>
+ chalk_ir::zip::Zip<Interner>
+ TypeFoldable<Interner>,
{
chalk_ir::zip::Zip::zip_with(self, variance, a.skip_binders(), b.skip_binders())
}
fn interner(&self) -> Interner {
Interner
}
fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase<Interner> {
&self.0
}
}

View file

@ -485,6 +485,7 @@ impl InferenceContext<'_> {
Statement::Expr { expr, has_semi: _ } => { Statement::Expr { expr, has_semi: _ } => {
self.consume_expr(*expr); self.consume_expr(*expr);
} }
Statement::Item => (),
} }
} }
if let Some(tail) = tail { if let Some(tail) = tail {
@ -531,6 +532,9 @@ impl InferenceContext<'_> {
self.consume_expr(expr); self.consume_expr(expr);
} }
} }
&Expr::Become { expr } => {
self.consume_expr(expr);
}
Expr::RecordLit { fields, spread, .. } => { Expr::RecordLit { fields, spread, .. } => {
if let &Some(expr) = spread { if let &Some(expr) = spread {
self.consume_expr(expr); self.consume_expr(expr);

View file

@ -502,6 +502,7 @@ impl InferenceContext<'_> {
self.result.standard_types.never.clone() self.result.standard_types.never.clone()
} }
&Expr::Return { expr } => self.infer_expr_return(tgt_expr, expr), &Expr::Return { expr } => self.infer_expr_return(tgt_expr, expr),
&Expr::Become { expr } => self.infer_expr_become(expr),
Expr::Yield { expr } => { Expr::Yield { expr } => {
if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() { if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() {
if let Some(expr) = expr { if let Some(expr) = expr {
@ -1084,6 +1085,27 @@ impl InferenceContext<'_> {
self.result.standard_types.never.clone() self.result.standard_types.never.clone()
} }
fn infer_expr_become(&mut self, expr: ExprId) -> Ty {
match &self.return_coercion {
Some(return_coercion) => {
let ret_ty = return_coercion.expected_ty();
let call_expr_ty =
self.infer_expr_inner(expr, &Expectation::HasType(ret_ty.clone()));
// NB: this should *not* coerce.
// tail calls don't support any coercions except lifetimes ones (like `&'static u8 -> &'a u8`).
self.unify(&call_expr_ty, &ret_ty);
}
None => {
// FIXME: diagnose `become` outside of functions
self.infer_expr_no_expect(expr);
}
}
self.result.standard_types.never.clone()
}
fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty { fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty {
if let Some(box_id) = self.resolve_boxed_box() { if let Some(box_id) = self.resolve_boxed_box() {
let table = &mut self.table; let table = &mut self.table;
@ -1367,6 +1389,7 @@ impl InferenceContext<'_> {
); );
} }
} }
Statement::Item => (),
} }
} }

View file

@ -65,6 +65,7 @@ impl InferenceContext<'_> {
Statement::Expr { expr, has_semi: _ } => { Statement::Expr { expr, has_semi: _ } => {
self.infer_mut_expr(*expr, Mutability::Not); self.infer_mut_expr(*expr, Mutability::Not);
} }
Statement::Item => (),
} }
} }
if let Some(tail) = tail { if let Some(tail) = tail {
@ -93,6 +94,9 @@ impl InferenceContext<'_> {
self.infer_mut_expr(expr, Mutability::Not); self.infer_mut_expr(expr, Mutability::Not);
} }
} }
Expr::Become { expr } => {
self.infer_mut_expr(*expr, Mutability::Not);
}
Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => {
self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
} }

View file

@ -74,6 +74,12 @@ impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
} }
} }
/// Check if types unify.
///
/// Note that we consider placeholder types to unify with everything.
/// This means that there may be some unresolved goals that actually set bounds for the placeholder
/// type for the types to unify. For example `Option<T>` and `Option<U>` unify although there is
/// unresolved goal `T = U`.
pub fn could_unify( pub fn could_unify(
db: &dyn HirDatabase, db: &dyn HirDatabase,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
@ -82,21 +88,35 @@ pub fn could_unify(
unify(db, env, tys).is_some() unify(db, env, tys).is_some()
} }
/// Check if types unify eagerly making sure there are no unresolved goals.
///
/// This means that placeholder types are not considered to unify if there are any bounds set on
/// them. For example `Option<T>` and `Option<U>` do not unify as we cannot show that `T = U`
pub fn could_unify_deeply(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
tys: &Canonical<(Ty, Ty)>,
) -> bool {
let mut table = InferenceTable::new(db, env);
let vars = make_substitutions(tys, &mut table);
let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars);
let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars);
table.resolve_obligations_as_possible();
table.propagate_diverging_flag();
let ty1_with_vars = table.resolve_completely(ty1_with_vars);
let ty2_with_vars = table.resolve_completely(ty2_with_vars);
table.unify_deeply(&ty1_with_vars, &ty2_with_vars)
}
pub(crate) fn unify( pub(crate) fn unify(
db: &dyn HirDatabase, db: &dyn HirDatabase,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
tys: &Canonical<(Ty, Ty)>, tys: &Canonical<(Ty, Ty)>,
) -> Option<Substitution> { ) -> Option<Substitution> {
let mut table = InferenceTable::new(db, env); let mut table = InferenceTable::new(db, env);
let vars = Substitution::from_iter( let vars = make_substitutions(tys, &mut table);
Interner,
tys.binders.iter(Interner).map(|it| match &it.kind {
chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner),
// FIXME: maybe wrong?
chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner),
chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner),
}),
);
let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner); let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
if !table.unify(&ty1_with_vars, &ty2_with_vars) { if !table.unify(&ty1_with_vars, &ty2_with_vars) {
@ -125,6 +145,21 @@ pub(crate) fn unify(
)) ))
} }
fn make_substitutions(
tys: &chalk_ir::Canonical<(chalk_ir::Ty<Interner>, chalk_ir::Ty<Interner>)>,
table: &mut InferenceTable<'_>,
) -> chalk_ir::Substitution<Interner> {
Substitution::from_iter(
Interner,
tys.binders.iter(Interner).map(|it| match &it.kind {
chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner),
// FIXME: maybe wrong?
chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner),
chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner),
}),
)
}
bitflags::bitflags! { bitflags::bitflags! {
#[derive(Default, Clone, Copy)] #[derive(Default, Clone, Copy)]
pub(crate) struct TypeVariableFlags: u8 { pub(crate) struct TypeVariableFlags: u8 {
@ -431,6 +466,18 @@ impl<'a> InferenceTable<'a> {
true true
} }
/// Unify two relatable values (e.g. `Ty`) and check whether trait goals which arise from that could be fulfilled
pub(crate) fn unify_deeply<T: ?Sized + Zip<Interner>>(&mut self, ty1: &T, ty2: &T) -> bool {
let result = match self.try_unify(ty1, ty2) {
Ok(r) => r,
Err(_) => return false,
};
result.goals.iter().all(|goal| {
let canonicalized = self.canonicalize(goal.clone());
self.try_resolve_obligation(&canonicalized).is_some()
})
}
/// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the /// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the
/// caller needs to deal with them. /// caller needs to deal with them.
pub(crate) fn try_unify<T: ?Sized + Zip<Interner>>( pub(crate) fn try_unify<T: ?Sized + Zip<Interner>>(
@ -501,7 +548,8 @@ impl<'a> InferenceTable<'a> {
fn register_obligation_in_env(&mut self, goal: InEnvironment<Goal>) { fn register_obligation_in_env(&mut self, goal: InEnvironment<Goal>) {
let canonicalized = self.canonicalize(goal); let canonicalized = self.canonicalize(goal);
if !self.try_resolve_obligation(&canonicalized) { let solution = self.try_resolve_obligation(&canonicalized);
if matches!(solution, Some(Solution::Ambig(_))) {
self.pending_obligations.push(canonicalized); self.pending_obligations.push(canonicalized);
} }
} }
@ -627,38 +675,35 @@ impl<'a> InferenceTable<'a> {
fn try_resolve_obligation( fn try_resolve_obligation(
&mut self, &mut self,
canonicalized: &Canonicalized<InEnvironment<Goal>>, canonicalized: &Canonicalized<InEnvironment<Goal>>,
) -> bool { ) -> Option<chalk_solve::Solution<Interner>> {
let solution = self.db.trait_solve( let solution = self.db.trait_solve(
self.trait_env.krate, self.trait_env.krate,
self.trait_env.block, self.trait_env.block,
canonicalized.value.clone(), canonicalized.value.clone(),
); );
match solution { match &solution {
Some(Solution::Unique(canonical_subst)) => { Some(Solution::Unique(canonical_subst)) => {
canonicalized.apply_solution( canonicalized.apply_solution(
self, self,
Canonical { Canonical {
binders: canonical_subst.binders, binders: canonical_subst.binders.clone(),
// FIXME: handle constraints // FIXME: handle constraints
value: canonical_subst.value.subst, value: canonical_subst.value.subst.clone(),
}, },
); );
true
} }
Some(Solution::Ambig(Guidance::Definite(substs))) => { Some(Solution::Ambig(Guidance::Definite(substs))) => {
canonicalized.apply_solution(self, substs); canonicalized.apply_solution(self, substs.clone());
false
} }
Some(_) => { Some(_) => {
// FIXME use this when trying to resolve everything at the end // FIXME use this when trying to resolve everything at the end
false
} }
None => { None => {
// FIXME obligation cannot be fulfilled => diagnostic // FIXME obligation cannot be fulfilled => diagnostic
true
} }
} }
solution
} }
pub(crate) fn callable_sig( pub(crate) fn callable_sig(

View file

@ -11,10 +11,8 @@ pub fn target_data_layout_query(
db: &dyn HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
) -> Result<Arc<TargetDataLayout>, Arc<str>> { ) -> Result<Arc<TargetDataLayout>, Arc<str>> {
let crate_graph = db.crate_graph(); match db.data_layout(krate) {
let res = crate_graph[krate].target_layout.as_deref(); Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(&it) {
match res {
Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
Ok(it) => Ok(Arc::new(it)), Ok(it) => Ok(Arc::new(it)),
Err(e) => { Err(e) => {
Err(match e { Err(match e {
@ -44,6 +42,6 @@ pub fn target_data_layout_query(
}.into()) }.into())
} }
}, },
Err(e) => Err(Arc::from(&**e)), Err(e) => Err(e),
} }
} }

View file

@ -1,6 +1,7 @@
use chalk_ir::{AdtId, TyKind}; use chalk_ir::{AdtId, TyKind};
use either::Either; use either::Either;
use hir_def::db::DefDatabase; use hir_def::db::DefDatabase;
use project_model::target_data_layout::RustcDataLayoutConfig;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use test_fixture::WithFixture; use test_fixture::WithFixture;
use triomphe::Arc; use triomphe::Arc;
@ -15,13 +16,18 @@ use crate::{
mod closure; mod closure;
fn current_machine_data_layout() -> String { fn current_machine_data_layout() -> String {
project_model::target_data_layout::get(None, None, &FxHashMap::default()).unwrap() project_model::target_data_layout::get(
RustcDataLayoutConfig::Rustc(None),
None,
&FxHashMap::default(),
)
.unwrap()
} }
fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> { fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout(); let target_data_layout = current_machine_data_layout();
let ra_fixture = format!( let ra_fixture = format!(
"{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}", "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\n{ra_fixture}",
); );
let (db, file_ids) = TestDB::with_many_files(&ra_fixture); let (db, file_ids) = TestDB::with_many_files(&ra_fixture);
@ -70,7 +76,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> { fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout(); let target_data_layout = current_machine_data_layout();
let ra_fixture = format!( let ra_fixture = format!(
"{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}", "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\nfn main(){{let goal = {{{ra_fixture}}};}}",
); );
let (db, file_id) = TestDB::with_single_file(&ra_fixture); let (db, file_id) = TestDB::with_single_file(&ra_fixture);

View file

@ -79,8 +79,8 @@ pub use builder::{ParamKind, TyBuilder};
pub use chalk_ext::*; pub use chalk_ext::*;
pub use infer::{ pub use infer::{
closure::{CaptureKind, CapturedItem}, closure::{CaptureKind, CapturedItem},
could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode,
InferenceResult, OverloadedDeref, PointerCast, InferenceDiagnostic, InferenceResult, OverloadedDeref, PointerCast,
}; };
pub use interner::Interner; pub use interner::Interner;
pub use lower::{ pub use lower::{

View file

@ -7,6 +7,7 @@ use std::iter;
use hir_def::{DefWithBodyId, HasModule}; use hir_def::{DefWithBodyId, HasModule};
use la_arena::ArenaMap; use la_arena::ArenaMap;
use rustc_hash::FxHashMap;
use stdx::never; use stdx::never;
use triomphe::Arc; use triomphe::Arc;
@ -14,7 +15,7 @@ use crate::{
db::{HirDatabase, InternedClosure}, db::{HirDatabase, InternedClosure},
mir::Operand, mir::Operand,
utils::ClosureSubst, utils::ClosureSubst,
ClosureId, Interner, Ty, TyExt, TypeFlags, ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags,
}; };
use super::{ use super::{
@ -36,11 +37,27 @@ pub struct MovedOutOfRef {
pub span: MirSpan, pub span: MirSpan,
} }
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PartiallyMoved {
pub ty: Ty,
pub span: MirSpan,
pub local: LocalId,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowRegion {
pub local: LocalId,
pub kind: BorrowKind,
pub places: Vec<MirSpan>,
}
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowckResult { pub struct BorrowckResult {
pub mir_body: Arc<MirBody>, pub mir_body: Arc<MirBody>,
pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>, pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
pub moved_out_of_ref: Vec<MovedOutOfRef>, pub moved_out_of_ref: Vec<MovedOutOfRef>,
pub partially_moved: Vec<PartiallyMoved>,
pub borrow_regions: Vec<BorrowRegion>,
} }
fn all_mir_bodies( fn all_mir_bodies(
@ -80,12 +97,26 @@ pub fn borrowck_query(
res.push(BorrowckResult { res.push(BorrowckResult {
mutability_of_locals: mutability_of_locals(db, &body), mutability_of_locals: mutability_of_locals(db, &body),
moved_out_of_ref: moved_out_of_ref(db, &body), moved_out_of_ref: moved_out_of_ref(db, &body),
partially_moved: partially_moved(db, &body),
borrow_regions: borrow_regions(db, &body),
mir_body: body, mir_body: body,
}); });
})?; })?;
Ok(res.into()) Ok(res.into())
} }
fn make_fetch_closure_field(
db: &dyn HirDatabase,
) -> impl FnOnce(ClosureId, &Substitution, usize) -> Ty + '_ {
|c: ClosureId, subst: &Substitution, f: usize| {
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
captures.get(f).expect("broken closure field").ty.clone().substitute(Interner, parent_subst)
}
}
fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef> { fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef> {
let mut result = vec![]; let mut result = vec![];
let mut for_operand = |op: &Operand, span: MirSpan| match op { let mut for_operand = |op: &Operand, span: MirSpan| match op {
@ -99,18 +130,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
ty = proj.projected_ty( ty = proj.projected_ty(
ty, ty,
db, db,
|c, subst, f| { make_fetch_closure_field(db),
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
captures
.get(f)
.expect("broken closure field")
.ty
.clone()
.substitute(Interner, parent_subst)
},
body.owner.module(db.upcast()).krate(), body.owner.module(db.upcast()).krate(),
); );
} }
@ -188,6 +208,132 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
result result
} }
fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved> {
let mut result = vec![];
let mut for_operand = |op: &Operand, span: MirSpan| match op {
Operand::Copy(p) | Operand::Move(p) => {
let mut ty: Ty = body.locals[p.local].ty.clone();
for proj in p.projection.lookup(&body.projection_store) {
ty = proj.projected_ty(
ty,
db,
make_fetch_closure_field(db),
body.owner.module(db.upcast()).krate(),
);
}
if !ty.clone().is_copy(db, body.owner)
&& !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR)
{
result.push(PartiallyMoved { span, ty, local: p.local });
}
}
Operand::Constant(_) | Operand::Static(_) => (),
};
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_cancelled();
for statement in &block.statements {
match &statement.kind {
StatementKind::Assign(_, r) => match r {
Rvalue::ShallowInitBoxWithAlloc(_) => (),
Rvalue::ShallowInitBox(o, _)
| Rvalue::UnaryOp(_, o)
| Rvalue::Cast(_, o, _)
| Rvalue::Repeat(o, _)
| Rvalue::Use(o) => for_operand(o, statement.span),
Rvalue::CopyForDeref(_)
| Rvalue::Discriminant(_)
| Rvalue::Len(_)
| Rvalue::Ref(_, _) => (),
Rvalue::CheckedBinaryOp(_, o1, o2) => {
for_operand(o1, statement.span);
for_operand(o2, statement.span);
}
Rvalue::Aggregate(_, ops) => {
for op in ops.iter() {
for_operand(op, statement.span);
}
}
},
StatementKind::FakeRead(_)
| StatementKind::Deinit(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
}
}
match &block.terminator {
Some(terminator) => match &terminator.kind {
TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, terminator.span),
TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Goto { .. }
| TerminatorKind::UnwindResume
| TerminatorKind::CoroutineDrop
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. } => (),
TerminatorKind::DropAndReplace { value, .. } => {
for_operand(value, terminator.span);
}
TerminatorKind::Call { func, args, .. } => {
for_operand(func, terminator.span);
args.iter().for_each(|it| for_operand(it, terminator.span));
}
TerminatorKind::Assert { cond, .. } => {
for_operand(cond, terminator.span);
}
TerminatorKind::Yield { value, .. } => {
for_operand(value, terminator.span);
}
},
None => (),
}
}
result.shrink_to_fit();
result
}
fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
let mut borrows = FxHashMap::default();
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_cancelled();
for statement in &block.statements {
if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind {
borrows
.entry(p.local)
.and_modify(|it: &mut BorrowRegion| {
it.places.push(statement.span);
})
.or_insert_with(|| BorrowRegion {
local: p.local,
kind: *kind,
places: vec![statement.span],
});
}
}
match &block.terminator {
Some(terminator) => match &terminator.kind {
TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Goto { .. }
| TerminatorKind::UnwindResume
| TerminatorKind::CoroutineDrop
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. } => (),
TerminatorKind::DropAndReplace { .. } => {}
TerminatorKind::Call { .. } => {}
_ => (),
},
None => (),
}
}
borrows.into_values().collect()
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ProjectionCase { enum ProjectionCase {
/// Projection is a local /// Projection is a local
@ -217,18 +363,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
ty = proj.projected_ty( ty = proj.projected_ty(
ty, ty,
db, db,
|c, subst, f| { make_fetch_closure_field(db),
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
captures
.get(f)
.expect("broken closure field")
.ty
.clone()
.substitute(Interner, parent_subst)
},
body.owner.module(db.upcast()).krate(), body.owner.module(db.upcast()).krate(),
); );
} }

View file

@ -4,11 +4,7 @@
use std::cmp; use std::cmp;
use chalk_ir::TyKind; use chalk_ir::TyKind;
use hir_def::{ use hir_def::builtin_type::{BuiltinInt, BuiltinUint};
builtin_type::{BuiltinInt, BuiltinUint},
resolver::HasResolver,
};
use hir_expand::mod_path::ModPath;
use super::*; use super::*;

View file

@ -1,6 +1,6 @@
//! This module generates a polymorphic MIR from a hir body //! This module generates a polymorphic MIR from a hir body
use std::{fmt::Write, iter, mem}; use std::{fmt::Write, mem};
use base_db::{salsa::Cycle, FileId}; use base_db::{salsa::Cycle, FileId};
use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
@ -14,23 +14,19 @@ use hir_def::{
lang_item::{LangItem, LangItemTarget}, lang_item::{LangItem, LangItemTarget},
path::Path, path::Path,
resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs}, resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs},
AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId, AdtId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId,
Lookup, TraitId, TupleId, TypeOrConstParamId, Lookup, TraitId, TupleId, TypeOrConstParamId,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
use la_arena::ArenaMap;
use rustc_hash::FxHashMap;
use syntax::TextRange; use syntax::TextRange;
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
consteval::ConstEvalError, consteval::ConstEvalError,
db::{HirDatabase, InternedClosure}, db::InternedClosure,
display::HirDisplay,
infer::{CaptureKind, CapturedItem, TypeMismatch}, infer::{CaptureKind, CapturedItem, TypeMismatch},
inhabitedness::is_ty_uninhabited_from, inhabitedness::is_ty_uninhabited_from,
layout::LayoutError, layout::LayoutError,
mapping::ToChalk,
static_lifetime, static_lifetime,
traits::FnTrait, traits::FnTrait,
utils::{generics, ClosureSubst}, utils::{generics, ClosureSubst},
@ -775,6 +771,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.set_terminator(current, TerminatorKind::Return, expr_id.into()); self.set_terminator(current, TerminatorKind::Return, expr_id.into());
Ok(None) Ok(None)
} }
Expr::Become { .. } => not_supported!("tail-calls"),
Expr::Yield { .. } => not_supported!("yield"), Expr::Yield { .. } => not_supported!("yield"),
Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => { Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => {
let spread_place = match spread { let spread_place = match spread {
@ -1246,7 +1243,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(current, place, op.into(), expr_id.into()); self.push_assignment(current, place, op.into(), expr_id.into());
Ok(Some(current)) Ok(Some(current))
} }
Expr::Underscore => not_supported!("underscore"), Expr::Underscore => Ok(Some(current)),
} }
} }
@ -1780,6 +1777,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_fake_read(c, p, expr.into()); self.push_fake_read(c, p, expr.into());
current = scope2.pop_and_drop(self, c, expr.into()); current = scope2.pop_and_drop(self, c, expr.into());
} }
hir_def::hir::Statement::Item => (),
} }
} }
if let Some(tail) = tail { if let Some(tail) = tail {

View file

@ -1,6 +1,6 @@
//! MIR lowering for patterns //! MIR lowering for patterns
use hir_def::{hir::LiteralOrConst, resolver::HasResolver, AssocItemId}; use hir_def::AssocItemId;
use crate::BindingMode; use crate::BindingMode;

View file

@ -1,3 +1,5 @@
use crate::tests::check_no_mismatches;
use super::check; use super::check;
#[test] #[test]
@ -94,3 +96,43 @@ fn test(x: bool) {
"#, "#,
); );
} }
#[test]
fn no_mismatches_on_atpit() {
check_no_mismatches(
r#"
//- minicore: option, sized
#![feature(impl_trait_in_assoc_type)]
trait WrappedAssoc {
type Assoc;
fn do_thing(&self) -> Option<Self::Assoc>;
}
struct Foo;
impl WrappedAssoc for Foo {
type Assoc = impl Sized;
fn do_thing(&self) -> Option<Self::Assoc> {
Some(())
}
}
"#,
);
check_no_mismatches(
r#"
//- minicore: option, sized
#![feature(impl_trait_in_assoc_type)]
trait Trait {
type Assoc;
const DEFINE: Option<Self::Assoc>;
}
impl Trait for () {
type Assoc = impl Sized;
const DEFINE: Option<Self::Assoc> = Option::Some(());
}
"#,
);
}

View file

@ -3376,11 +3376,8 @@ fn main() {
[x,] = &[1,]; [x,] = &[1,];
//^^^^expected &[i32; 1], got [{unknown}; _] //^^^^expected &[i32; 1], got [{unknown}; _]
// FIXME we only want the outermost error, but this matches the current
// behavior of slice patterns
let x; let x;
[(x,),] = &[(1,),]; [(x,),] = &[(1,),];
// ^^^^expected {unknown}, got ({unknown},)
//^^^^^^^expected &[(i32,); 1], got [{unknown}; _] //^^^^^^^expected &[(i32,); 1], got [{unknown}; _]
let x; let x;

View file

@ -31,6 +31,7 @@ mod has_source;
pub mod db; pub mod db;
pub mod diagnostics; pub mod diagnostics;
pub mod symbols; pub mod symbols;
pub mod term_search;
mod display; mod display;
@ -1084,6 +1085,27 @@ impl Field {
Type::new(db, var_id, ty) Type::new(db, var_id, ty)
} }
// FIXME: Find better API to also handle const generics
pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
let var_id = self.parent.into();
let def_id: AdtId = match self.parent {
VariantDef::Struct(it) => it.id.into(),
VariantDef::Union(it) => it.id.into(),
VariantDef::Variant(it) => it.parent_enum(db).id.into(),
};
let mut generics = generics.map(|it| it.ty.clone());
let substs = TyBuilder::subst_for_def(db, def_id, None)
.fill(|x| match x {
ParamKind::Type => {
generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner)
}
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
})
.build();
let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
Type::new(db, var_id, ty)
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> { pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
db.layout_of_ty( db.layout_of_ty(
self.ty(db).ty, self.ty(db).ty,
@ -1152,6 +1174,10 @@ impl Struct {
fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> { fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
db.struct_data(self.id).variant_data.clone() db.struct_data(self.id).variant_data.clone()
} }
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).is_unstable()
}
} }
impl HasVisibility for Struct { impl HasVisibility for Struct {
@ -1194,6 +1220,10 @@ impl Union {
fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> { fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
db.union_data(self.id).variant_data.clone() db.union_data(self.id).variant_data.clone()
} }
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
db.attrs(self.id.into()).is_unstable()
}
} }
impl HasVisibility for Union { impl HasVisibility for Union {
@ -1269,6 +1299,10 @@ impl Enum {
pub fn layout(self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> { pub fn layout(self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
Adt::from(self).layout(db) Adt::from(self).layout(db)
} }
/// Is this enum marked as unstable with the `#[unstable]` attribute?
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
    let attrs = db.attrs(self.id.into());
    attrs.is_unstable()
}
} }
impl HasVisibility for Enum { impl HasVisibility for Enum {
@ -1344,6 +1378,10 @@ impl Variant {
_ => parent_layout, _ => parent_layout,
}) })
} }
/// Is this enum variant marked as unstable with the `#[unstable]` attribute?
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
    let attrs = db.attrs(self.id.into());
    attrs.is_unstable()
}
} }
/// Variants inherit visibility from the parent enum. /// Variants inherit visibility from the parent enum.
@ -1394,9 +1432,9 @@ impl Adt {
/// Turns this ADT into a type with the given type parameters. This isn't /// Turns this ADT into a type with the given type parameters. This isn't
/// the greatest API, FIXME find a better one. /// the greatest API, FIXME find a better one.
pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type { pub fn ty_with_args(self, db: &dyn HirDatabase, args: impl Iterator<Item = Type>) -> Type {
let id = AdtId::from(self); let id = AdtId::from(self);
let mut it = args.iter().map(|t| t.ty.clone()); let mut it = args.map(|t| t.ty.clone());
let ty = TyBuilder::def_ty(db, id.into(), None) let ty = TyBuilder::def_ty(db, id.into(), None)
.fill(|x| { .fill(|x| {
let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)); let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
@ -1789,6 +1827,35 @@ impl Function {
Type::new_with_resolver_inner(db, &resolver, ty) Type::new_with_resolver_inner(db, &resolver, ty)
} }
// FIXME: Find better API to also handle const generics
/// Return type of the function after substituting its (and, for associated functions,
/// its parent impl/trait's) generic parameters with `generics`.
///
/// Missing type arguments are filled with `TyKind::Error`; const generics are substituted
/// with an unknown const (see the FIXME above).
pub fn ret_type_with_args(
    self,
    db: &dyn HirDatabase,
    generics: impl Iterator<Item = Type>,
) -> Type {
    let resolver = self.id.resolver(db.upcast());
    // Associated functions also need their container's (impl/trait) substitution.
    let parent_id: Option<GenericDefId> = match self.id.lookup(db.upcast()).container {
        ItemContainerId::ImplId(it) => Some(it.into()),
        ItemContainerId::TraitId(it) => Some(it.into()),
        ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
    };
    let mut generics = generics.map(|it| it.ty.clone());
    let mut filler = |x: &_| match x {
        ParamKind::Type => {
            generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner)
        }
        ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
    };
    // Parent parameters consume from `generics` first, then the function's own.
    let parent_substs =
        parent_id.map(|id| TyBuilder::subst_for_def(db, id, None).fill(&mut filler).build());
    let substs = TyBuilder::subst_for_def(db, self.id, parent_substs).fill(&mut filler).build();

    let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
    let ty = callable_sig.ret().clone();
    Type::new_with_resolver_inner(db, &resolver, ty)
}
pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> { pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> {
if !self.is_async(db) { if !self.is_async(db) {
return None; return None;
@ -1855,6 +1922,51 @@ impl Function {
.collect() .collect()
} }
// FIXME: Find better API to also handle const generics
pub fn params_without_self_with_args(
self,
db: &dyn HirDatabase,
generics: impl Iterator<Item = Type>,
) -> Vec<Param> {
let environment = db.trait_environment(self.id.into());
let parent_id: Option<GenericDefId> = match self.id.lookup(db.upcast()).container {
ItemContainerId::ImplId(it) => Some(it.into()),
ItemContainerId::TraitId(it) => Some(it.into()),
ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
};
let mut generics = generics.map(|it| it.ty.clone());
let parent_substs = parent_id.map(|id| {
TyBuilder::subst_for_def(db, id, None)
.fill(|x| match x {
ParamKind::Type => generics
.next()
.unwrap_or_else(|| TyKind::Error.intern(Interner))
.cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
})
.build()
});
let substs = TyBuilder::subst_for_def(db, self.id, parent_substs)
.fill(|_| {
let ty = generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
GenericArg::new(Interner, GenericArgData::Ty(ty))
})
.build();
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 };
callable_sig
.params()
.iter()
.enumerate()
.skip(skip)
.map(|(idx, ty)| {
let ty = Type { env: environment.clone(), ty: ty.clone() };
Param { func: self, ty, idx }
})
.collect()
}
pub fn is_const(self, db: &dyn HirDatabase) -> bool { pub fn is_const(self, db: &dyn HirDatabase) -> bool {
db.function_data(self.id).has_const_kw() db.function_data(self.id).has_const_kw()
} }
@ -1889,6 +2001,11 @@ impl Function {
db.function_data(self.id).attrs.is_bench() db.function_data(self.id).attrs.is_bench()
} }
/// Is this function marked as unstable with `#[feature]` attribute?
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
    let data = db.function_data(self.id);
    data.attrs.is_unstable()
}
pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool { pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
hir_ty::is_fn_unsafe_to_call(db, self.id) hir_ty::is_fn_unsafe_to_call(db, self.id)
} }
@ -2052,6 +2169,34 @@ impl SelfParam {
let ty = callable_sig.params()[0].clone(); let ty = callable_sig.params()[0].clone();
Type { env: environment, ty } Type { env: environment, ty }
} }
// FIXME: Find better API to also handle const generics
/// Type of `self` after substituting the parent impl/trait's (and the function's own)
/// generic parameters with `generics`.
///
/// Missing type arguments are filled with `TyKind::Error`; const generics are substituted
/// with an unknown const (see the FIXME above).
pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
    let parent_id: GenericDefId = match self.func.lookup(db.upcast()).container {
        ItemContainerId::ImplId(it) => it.into(),
        ItemContainerId::TraitId(it) => it.into(),
        ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
            // A function with a `self` parameter is always an associated item,
            // so the free-standing/extern containers cannot occur here.
            unreachable!("`SelfParam` can only exist on a function inside an impl or trait")
        }
    };

    let mut generics = generics.map(|it| it.ty.clone());
    let mut filler = |x: &_| match x {
        ParamKind::Type => {
            generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner)
        }
        ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
    };

    // Parent (impl/trait) parameters consume from `generics` first, then the function's own.
    let parent_substs = TyBuilder::subst_for_def(db, parent_id, None).fill(&mut filler).build();
    let substs =
        TyBuilder::subst_for_def(db, self.func, Some(parent_substs)).fill(&mut filler).build();

    let callable_sig =
        db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
    let environment = db.trait_environment(self.func.into());
    // `self` is always the first parameter of the callable signature.
    let ty = callable_sig.params()[0].clone();
    Type { env: environment, ty }
}
} }
impl HasVisibility for Function { impl HasVisibility for Function {
@ -2754,7 +2899,7 @@ impl GenericDef {
.collect() .collect()
} }
pub fn type_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> { pub fn type_or_const_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
let generics = db.generic_params(self.into()); let generics = db.generic_params(self.into());
generics generics
.type_or_consts .type_or_consts
@ -3126,12 +3271,16 @@ impl TypeParam {
let ty = generic_arg_from_param(db, self.id.into())?; let ty = generic_arg_from_param(db, self.id.into())?;
let resolver = self.id.parent().resolver(db.upcast()); let resolver = self.id.parent().resolver(db.upcast());
match ty.data(Interner) { match ty.data(Interner) {
GenericArgData::Ty(it) => { GenericArgData::Ty(it) if *it.kind(Interner) != TyKind::Error => {
Some(Type::new_with_resolver_inner(db, &resolver, it.clone())) Some(Type::new_with_resolver_inner(db, &resolver, it.clone()))
} }
_ => None, _ => None,
} }
} }
/// Is this type parameter marked as unstable with the `#[unstable]` attribute?
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
    let attrs = db.attrs(GenericParamId::from(self.id).into());
    attrs.is_unstable()
}
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@ -3241,6 +3390,26 @@ impl TypeOrConstParam {
Either::Right(it) => it.ty(db), Either::Right(it) => it.ty(db),
} }
} }
/// Narrow this parameter to a type parameter, or `None` if it is a const parameter.
pub fn as_type_param(self, db: &dyn HirDatabase) -> Option<TypeParam> {
    let params = db.generic_params(self.id.parent);
    if let hir_def::generics::TypeOrConstParamData::TypeParamData(_) =
        &params.type_or_consts[self.id.local_id]
    {
        Some(TypeParam { id: TypeParamId::from_unchecked(self.id) })
    } else {
        None
    }
}
/// Narrow this parameter to a const parameter, or `None` if it is a type parameter.
pub fn as_const_param(self, db: &dyn HirDatabase) -> Option<ConstParam> {
    let params = db.generic_params(self.id.parent);
    if let hir_def::generics::TypeOrConstParamData::ConstParamData(_) =
        &params.type_or_consts[self.id.local_id]
    {
        Some(ConstParam { id: ConstParamId::from_unchecked(self.id) })
    } else {
        None
    }
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -3285,12 +3454,11 @@ impl Impl {
.filter(filter), .filter(filter),
) )
}); });
for id in def_crates for id in def_crates
.iter() .iter()
.flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db)) .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db))
.map(|Crate { id }| id) .map(|Crate { id }| id)
.chain(def_crates.iter().copied())
.unique()
{ {
all.extend( all.extend(
db.trait_impls_in_crate(id) db.trait_impls_in_crate(id)
@ -3520,7 +3688,7 @@ pub enum CaptureKind {
Move, Move,
} }
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct Type { pub struct Type {
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
ty: Ty, ty: Ty,
@ -3620,6 +3788,50 @@ impl Type {
matches!(self.ty.kind(Interner), TyKind::Ref(..)) matches!(self.ty.kind(Interner), TyKind::Ref(..))
} }
/// Does this type contain a reference anywhere inside it (behind ADT fields, generic
/// arguments, tuples, arrays, slices or raw pointers)?
pub fn contains_reference(&self, db: &dyn HirDatabase) -> bool {
    return go(db, self.env.krate, &self.ty);

    // Recursive worker over the chalk `Ty` representation.
    fn go(db: &dyn HirDatabase, krate: CrateId, ty: &Ty) -> bool {
        match ty.kind(Interner) {
            // Reference itself
            TyKind::Ref(_, _, _) => true,
            // For non-phantom_data adts we check variants/fields as well as generic parameters
            TyKind::Adt(adt_id, substitution)
                if !db.struct_datum(krate, *adt_id).flags.phantom_data =>
            {
                let adt_datum = &db.struct_datum(krate, *adt_id);
                // Instantiate the ADT's binders with the concrete substitution so
                // field types are checked as used here, not as declared.
                let adt_datum_bound =
                    adt_datum.binders.clone().substitute(Interner, substitution);
                adt_datum_bound
                    .variants
                    .into_iter()
                    .flat_map(|variant| variant.fields.into_iter())
                    .any(|ty| go(db, krate, &ty))
                    || substitution
                        .iter(Interner)
                        .filter_map(|x| x.ty(Interner))
                        .any(|ty| go(db, krate, ty))
            }
            // And for `PhantomData<T>`, we check `T`.
            TyKind::Adt(_, substitution)
            | TyKind::Tuple(_, substitution)
            | TyKind::OpaqueType(_, substitution)
            | TyKind::AssociatedType(_, substitution)
            | TyKind::FnDef(_, substitution) => substitution
                .iter(Interner)
                .filter_map(|x| x.ty(Interner))
                .any(|ty| go(db, krate, ty)),
            // For `[T]` or `*T` we check `T`
            TyKind::Array(ty, _) | TyKind::Slice(ty) | TyKind::Raw(_, ty) => go(db, krate, ty),
            // Consider everything else as not reference
            _ => false,
        }
    }
}
pub fn as_reference(&self) -> Option<(Type, Mutability)> { pub fn as_reference(&self) -> Option<(Type, Mutability)> {
let (ty, _lt, m) = self.ty.as_reference()?; let (ty, _lt, m) = self.ty.as_reference()?;
let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut)); let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut));
@ -3727,14 +3939,16 @@ impl Type {
) )
} }
// FIXME: Find better API that also handles const generics
pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool { pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool {
let mut it = args.iter().map(|t| t.ty.clone()); let mut it = args.iter().map(|t| t.ty.clone());
let trait_ref = TyBuilder::trait_ref(db, trait_.id) let trait_ref = TyBuilder::trait_ref(db, trait_.id)
.push(self.ty.clone()) .push(self.ty.clone())
.fill(|x| { .fill(|x| {
let r = it.next().unwrap();
match x { match x {
ParamKind::Type => r.cast(Interner), ParamKind::Type => {
it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner)
}
ParamKind::Const(ty) => { ParamKind::Const(ty) => {
// FIXME: this code is not covered in tests. // FIXME: this code is not covered in tests.
unknown_const_as_generic(ty.clone()) unknown_const_as_generic(ty.clone())
@ -4368,12 +4582,24 @@ impl Type {
walk_type(db, self, &mut cb); walk_type(db, self, &mut cb);
} }
/// Check if type unifies with another type.
///
/// Note that we consider placeholder types to unify with everything.
/// For example `Option<T>` and `Option<U>` unify although there is unresolved goal `T = U`.
pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool { pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone())); let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
hir_ty::could_unify(db, self.env.clone(), &tys) hir_ty::could_unify(db, self.env.clone(), &tys)
} }
/// Check if type unifies with another type eagerly, making sure there are no
/// unresolved goals left over.
///
/// Unlike `could_unify_with`, placeholder types only count as unifying when the
/// resulting obligations can actually be proven: `Option<T>` and `Option<U>` do
/// not unify here, as `T = U` cannot be shown.
pub fn could_unify_with_deeply(&self, db: &dyn HirDatabase, other: &Type) -> bool {
    let pair = (self.ty.clone(), other.ty.clone());
    let tys = hir_ty::replace_errors_with_variables(&pair);
    hir_ty::could_unify_deeply(db, self.env.clone(), &tys)
}
pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool { pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone())); let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
hir_ty::could_coerce(db, self.env.clone(), &tys) hir_ty::could_coerce(db, self.env.clone(), &tys)

View file

@ -0,0 +1,298 @@
//! Term search
use hir_def::type_ref::Mutability;
use hir_ty::db::HirDatabase;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{ModuleDef, ScopeDef, Semantics, SemanticsScope, Type};
mod expr;
pub use expr::Expr;
mod tactics;
/// Key for lookup table to query new types reached.
#[derive(Debug, Hash, PartialEq, Eq)]
enum NewTypesKey {
    /// Types first reached via an inherent/trait method call tactic
    ImplMethod,
    /// Types first reached via struct field projection tactic
    StructProjection,
}
/// Helper enum to squash a big number of alternative trees into the `Many` variant,
/// as there are too many to take into account.
#[derive(Debug)]
enum AlternativeExprs {
    /// There are few trees, so we keep track of them all
    Few(FxHashSet<Expr>),
    /// There are too many trees to keep track of
    Many,
}
impl AlternativeExprs {
    /// Construct alternative trees
    ///
    /// # Arguments
    /// `threshold` - threshold value for many trees (more than that is many)
    /// `exprs` - expressions iterator
    fn new(threshold: usize, exprs: impl Iterator<Item = Expr>) -> AlternativeExprs {
        let mut it = AlternativeExprs::Few(Default::default());
        it.extend_with_threshold(threshold, exprs);
        it
    }

    /// Get type trees stored in alternative trees (or `Expr::Many` in case of many)
    ///
    /// # Arguments
    /// `ty` - Type of expressions queried (this is used to give type to `Expr::Many`)
    fn exprs(&self, ty: &Type) -> Vec<Expr> {
        match self {
            AlternativeExprs::Few(exprs) => exprs.iter().cloned().collect(),
            AlternativeExprs::Many => vec![Expr::Many(ty.clone())],
        }
    }

    /// Extend alternative expressions, collapsing to `Many` once the threshold is crossed
    ///
    /// # Arguments
    /// `threshold` - threshold value for many trees (more than that is many)
    /// `exprs` - expressions iterator
    fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = Expr>) {
        match self {
            AlternativeExprs::Few(tts) => {
                for it in exprs {
                    // NOTE(review): the check runs before the insert, so `Few` can hold
                    // up to `threshold + 1` entries before collapsing — confirm intended.
                    if tts.len() > threshold {
                        *self = AlternativeExprs::Many;
                        break;
                    }

                    tts.insert(it);
                }
            }
            // Already saturated; further expressions are dropped.
            AlternativeExprs::Many => (),
        }
    }
}
/// # Lookup table for term search
///
/// Lookup table keeps all the state during term search.
/// This means it knows which types are reachable and how to reach them.
///
/// The secondary functionality for lookup table is to keep track of new types reached since last
/// iteration as well as keeping track of which `ScopeDef` items have been used.
/// Both of them are to speed up the term search by leaving out types / ScopeDefs that likely do
/// not produce any new results.
#[derive(Default, Debug)]
struct LookupTable {
    /// All the `Expr`s in "value" produce the type of "key"
    data: FxHashMap<Type, AlternativeExprs>,
    /// New types reached since last query by the `NewTypesKey`
    new_types: FxHashMap<NewTypesKey, Vec<Type>>,
    /// ScopeDefs that are not interesting any more
    exhausted_scopedefs: FxHashSet<ScopeDef>,
    /// ScopeDefs that were used in current round
    round_scopedef_hits: FxHashSet<ScopeDef>,
    /// Amount of rounds since scopedef was first used.
    // NOTE(review): field name has a typo ("sopedef" -> "scopedef"); rename together
    // with its use in `new_round`.
    rounds_since_sopedef_hit: FxHashMap<ScopeDef, u32>,
    /// Types queried but not present
    types_wishlist: FxHashSet<Type>,
    /// Threshold to squash trees to `Many`
    many_threshold: usize,
}
impl LookupTable {
    /// Initialize lookup table with the given `Many` threshold
    fn new(many_threshold: usize) -> Self {
        let mut res = Self { many_threshold, ..Default::default() };
        // Pre-register the keys so `new_types` queries see an (empty) entry right away.
        res.new_types.insert(NewTypesKey::ImplMethod, Vec::new());
        res.new_types.insert(NewTypesKey::StructProjection, Vec::new());
        res
    }

    /// Find all `Expr`s that unify with the `ty`
    fn find(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
        self.data
            .iter()
            .find(|(t, _)| t.could_unify_with_deeply(db, ty))
            .map(|(t, tts)| tts.exprs(t))
    }

    /// Same as find but automatically creates shared reference of types in the lookup
    ///
    /// For example if we have type `i32` in data and we query for `&i32` it maps all the type
    /// trees we have for `i32` into `Expr::Reference` and returns them.
    fn find_autoref(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
        self.data
            .iter()
            .find(|(t, _)| t.could_unify_with_deeply(db, ty))
            .map(|(t, it)| it.exprs(t))
            .or_else(|| {
                // Fall back to a stored type whose `&T` form unifies with the query,
                // wrapping each expression in a shared reference.
                self.data
                    .iter()
                    .find(|(t, _)| {
                        Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, ty)
                    })
                    .map(|(t, it)| {
                        it.exprs(t)
                            .into_iter()
                            .map(|expr| Expr::Reference(Box::new(expr)))
                            .collect()
                    })
            })
    }

    /// Insert new type trees for type
    ///
    /// Note that the types have to be the same, unification is not enough as unification is not
    /// transitive. For example Vec<i32> and FxHashSet<i32> both unify with Iterator<Item = i32>,
    /// but they clearly do not unify themselves.
    fn insert(&mut self, ty: Type, exprs: impl Iterator<Item = Expr>) {
        match self.data.get_mut(&ty) {
            Some(it) => it.extend_with_threshold(self.many_threshold, exprs),
            None => {
                self.data.insert(ty.clone(), AlternativeExprs::new(self.many_threshold, exprs));
                // A brand-new type: record it for every `NewTypesKey` consumer.
                for it in self.new_types.values_mut() {
                    it.push(ty.clone());
                }
            }
        }
    }

    /// Iterate all the reachable types
    fn iter_types(&self) -> impl Iterator<Item = Type> + '_ {
        self.data.keys().cloned()
    }

    /// Query new types reached since last query by key
    ///
    /// Create new key if you wish to query it to avoid conflicting with existing queries.
    fn new_types(&mut self, key: NewTypesKey) -> Vec<Type> {
        match self.new_types.get_mut(&key) {
            // `take` resets the list so the next query only sees types reached after this one.
            Some(it) => std::mem::take(it),
            None => Vec::new(),
        }
    }

    /// Mark `ScopeDef` as exhausted meaning it is not interesting for us any more
    fn mark_exhausted(&mut self, def: ScopeDef) {
        self.exhausted_scopedefs.insert(def);
    }

    /// Mark `ScopeDef` as used meaning we managed to produce something useful from it
    fn mark_fulfilled(&mut self, def: ScopeDef) {
        self.round_scopedef_hits.insert(def);
    }

    /// Start new round (meant to be called at the beginning of iteration in `term_search`)
    ///
    /// This functions marks some `ScopeDef`s as exhausted if there have been
    /// `MAX_ROUNDS_AFTER_HIT` rounds after first using a `ScopeDef`.
    fn new_round(&mut self) {
        for def in &self.round_scopedef_hits {
            let hits =
                self.rounds_since_sopedef_hit.entry(*def).and_modify(|n| *n += 1).or_insert(0);
            const MAX_ROUNDS_AFTER_HIT: u32 = 2;
            // Retire defs that were first used long enough ago that they are unlikely
            // to yield anything new.
            if *hits > MAX_ROUNDS_AFTER_HIT {
                self.exhausted_scopedefs.insert(*def);
            }
        }
        self.round_scopedef_hits.clear();
    }

    /// Get exhausted `ScopeDef`s
    fn exhausted_scopedefs(&self) -> &FxHashSet<ScopeDef> {
        &self.exhausted_scopedefs
    }

    /// Types queried but not found
    fn take_types_wishlist(&mut self) -> FxHashSet<Type> {
        std::mem::take(&mut self.types_wishlist)
    }
}
/// Context for the `term_search` function
#[derive(Debug)]
pub struct TermSearchCtx<'a, DB: HirDatabase> {
    /// Semantics for the program
    pub sema: &'a Semantics<'a, DB>,
    /// Semantic scope, captures context for the term search
    pub scope: &'a SemanticsScope<'a>,
    /// Target / expected output type; the search returns expressions unifying with it
    pub goal: Type,
    /// Configuration for term search
    pub config: TermSearchConfig,
}
/// Configuration options for the term search
#[derive(Debug, Clone, Copy)]
pub struct TermSearchConfig {
    /// Enable borrow checking, this guarantees the outputs of the `term_search` to borrow-check
    pub enable_borrowcheck: bool,
    /// Indicate when to squash multiple trees to `Many` as there are too many to keep track
    pub many_alternatives_threshold: usize,
    /// Depth of the search eg. number of cycles to run
    pub depth: usize,
}
impl Default for TermSearchConfig {
fn default() -> Self {
Self { enable_borrowcheck: true, many_alternatives_threshold: 1, depth: 6 }
}
}
/// # Term search
///
/// Search for terms (expressions) that unify with the `goal` type.
///
/// # Arguments
/// * `ctx` - Context for term search
///
/// Internally this function uses Breadth First Search to find path to `goal` type.
/// The general idea is following:
/// 1. Populate lookup (frontier for BFS) from values (local variables, statics, constants, etc)
///    as well as from well known values (such as `true/false` and `()`)
/// 2. Iteratively expand the frontier (or contents of the lookup) by trying different type
///    transformation tactics. For example functions map from a set of types (arguments) to some
///    type (return type). Other transformations include methods on type, type constructors and
///    projections to struct fields (field access).
/// 3. Once we manage to find a path to the type we are interested in we continue for a single
///    round to see if we can find more paths that take us to the `goal` type.
/// 4. Return all the paths (type trees) that take us to the `goal` type.
///
/// Note that there are usually more ways we can get to the `goal` type but some are discarded to
/// reduce the memory consumption. It is also unlikely anyone is willing to browse through
/// thousands of possible responses so we currently take first 10 from every tactic.
pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
    let module = ctx.scope.module();
    // Collect everything nameable from the current scope as search building blocks.
    let mut defs = FxHashSet::default();
    defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(module)));

    ctx.scope.process_all_names(&mut |_, def| {
        defs.insert(def);
    });

    let mut lookup = LookupTable::new(ctx.config.many_alternatives_threshold);

    // Try trivial tactic first, also populates lookup table
    let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect();
    // Use well known types tactic before iterations as it does not depend on other tactics
    solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup));

    for _ in 0..ctx.config.depth {
        lookup.new_round();

        solutions.extend(tactics::type_constructor(ctx, &defs, &mut lookup));
        solutions.extend(tactics::free_function(ctx, &defs, &mut lookup));
        solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup));
        solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup));
        solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup));

        // Discard not interesting `ScopeDef`s for speedup
        for def in lookup.exhausted_scopedefs() {
            defs.remove(def);
        }
    }

    // `Many` placeholders are not concrete answers; drop them and deduplicate.
    solutions.into_iter().filter(|it| !it.is_many()).unique().collect()
}

View file

@ -0,0 +1,468 @@
//! Type tree for term search
use hir_def::find_path::PrefixKind;
use hir_expand::mod_path::ModPath;
use hir_ty::{
db::HirDatabase,
display::{DisplaySourceCodeError, HirDisplay},
};
use itertools::Itertools;
use crate::{
Adt, AsAssocItem, Const, ConstParam, Field, Function, GenericDef, Local, ModuleDef,
SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant,
};
/// Helper function to get path to `ModuleDef`
///
/// If the item's plain name is also bound to something else in scope (more than one hit
/// while walking scope names), a crate-prefixed path is produced instead so the generated
/// code still resolves to the intended item.
fn mod_item_path(
    sema_scope: &SemanticsScope<'_>,
    def: &ModuleDef,
    prefer_no_std: bool,
    prefer_prelude: bool,
) -> Option<ModPath> {
    let db = sema_scope.db;
    // Account for locals shadowing items from module
    let name_hit_count = def.name(db).map(|def_name| {
        let mut name_hit_count = 0;
        sema_scope.process_all_names(&mut |name, _| {
            if name == def_name {
                name_hit_count += 1;
            }
        });
        name_hit_count
    });

    let m = sema_scope.module();
    match name_hit_count {
        // Unambiguous (or unnamed): a plain use-path is fine.
        Some(0..=1) | None => m.find_use_path(db.upcast(), *def, prefer_no_std, prefer_prelude),
        // Shadowed: force a `crate::`-prefixed path.
        Some(_) => m.find_use_path_prefixed(
            db.upcast(),
            *def,
            PrefixKind::ByCrate,
            prefer_no_std,
            prefer_prelude,
        ),
    }
}
/// Helper function to get path to `ModuleDef` as string
fn mod_item_path_str(
    sema_scope: &SemanticsScope<'_>,
    def: &ModuleDef,
    prefer_no_std: bool,
    prefer_prelude: bool,
) -> Result<String, DisplaySourceCodeError> {
    match mod_item_path(sema_scope, def, prefer_no_std, prefer_prelude) {
        Some(path) => Ok(path.display(sema_scope.db.upcast()).to_string()),
        None => Err(DisplaySourceCodeError::PathNotFound),
    }
}
/// Helper function to get path to `Type`
fn type_path(
sema_scope: &SemanticsScope<'_>,
ty: &Type,
prefer_no_std: bool,
prefer_prelude: bool,
) -> Result<String, DisplaySourceCodeError> {
let db = sema_scope.db;
let m = sema_scope.module();
match ty.as_adt() {
Some(adt) => {
let ty_name = ty.display_source_code(db, m.id, true)?;
let mut path =
mod_item_path(sema_scope, &ModuleDef::Adt(adt), prefer_no_std, prefer_prelude)
.unwrap();
path.pop_segment();
let path = path.display(db.upcast()).to_string();
let res = match path.is_empty() {
true => ty_name,
false => format!("{path}::{ty_name}"),
};
Ok(res)
}
None => ty.display_source_code(db, m.id, true),
}
}
/// Helper function to filter out generic parameters that are default
fn non_default_generics(db: &dyn HirDatabase, def: GenericDef, generics: &[Type]) -> Vec<Type> {
    let type_params =
        def.type_or_const_params(db).into_iter().filter_map(|it| it.as_type_param(db));
    type_params
        .zip(generics)
        .filter_map(|(param, arg)| {
            // Keep only arguments that differ from the parameter's declared default.
            if param.default(db).as_ref() == Some(arg) {
                None
            } else {
                Some(arg.clone())
            }
        })
        .collect()
}
/// A type tree shows how we can get from a set of types to some type.
///
/// Consider the following code as an example
/// ```
/// fn foo(x: i32, y: bool) -> Option<i32> { None }
/// fn bar() {
///    let a = 1;
///    let b = true;
///    let c: Option<i32> = _;
/// }
/// ```
/// If we generate type tree in the place of `_` we get
/// ```txt
///       Option<i32>
///           |
///     foo(i32, bool)
///      /        \
///  a: i32      b: bool
/// ```
/// So in short it pretty much gives us a way to get type `Option<i32>` using the items we have in
/// scope.
#[derive(Debug, Clone, Eq, Hash, PartialEq)]
pub enum Expr {
    /// Constant
    Const(Const),
    /// Static variable
    Static(Static),
    /// Local variable
    Local(Local),
    /// Constant generic parameter
    ConstParam(ConstParam),
    /// Well known type (such as `true` for bool)
    FamousType { ty: Type, value: &'static str },
    /// Function call (does not take self param)
    Function { func: Function, generics: Vec<Type>, params: Vec<Expr> },
    /// Method call (has self param)
    Method { func: Function, generics: Vec<Type>, target: Box<Expr>, params: Vec<Expr> },
    /// Enum variant construction
    Variant { variant: Variant, generics: Vec<Type>, params: Vec<Expr> },
    /// Struct construction
    Struct { strukt: Struct, generics: Vec<Type>, params: Vec<Expr> },
    /// Struct field access
    Field { expr: Box<Expr>, field: Field },
    /// Passing type as reference (with `&`)
    Reference(Box<Expr>),
    /// Indicates possibility of many different options that all evaluate to `ty`
    Many(Type),
}
impl Expr {
/// Generate source code for type tree.
///
/// Note that trait imports are not added to generated code.
/// To make sure that the code is valid, the caller has to also ensure that all the traits
/// listed by `traits_used` method are also imported.
pub fn gen_source_code(
    &self,
    sema_scope: &SemanticsScope<'_>,
    many_formatter: &mut dyn FnMut(&Type) -> String,
    prefer_no_std: bool,
    prefer_prelude: bool,
) -> Result<String, DisplaySourceCodeError> {
    let db = sema_scope.db;
    // Shadow the free function so the path-style flags don't need repeating at each call.
    let mod_item_path_str = |s, def| mod_item_path_str(s, def, prefer_no_std, prefer_prelude);
    match self {
        // Leaves: named items or literals render directly.
        Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
        Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
        Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()),
        Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()),
        Expr::FamousType { value, .. } => Ok(value.to_string()),
        Expr::Function { func, params, .. } => {
            let args = params
                .iter()
                .map(|f| {
                    f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude)
                })
                .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
                .into_iter()
                .join(", ");

            match func.as_assoc_item(db).map(|it| it.container(db)) {
                // Associated function: render as `Container::func(args)`.
                Some(container) => {
                    let container_name = match container {
                        crate::AssocItemContainer::Trait(trait_) => {
                            mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?
                        }
                        crate::AssocItemContainer::Impl(imp) => {
                            let self_ty = imp.self_ty(db);
                            // Should it be guaranteed that `mod_item_path` always exists?
                            match self_ty.as_adt().and_then(|adt| {
                                mod_item_path(
                                    sema_scope,
                                    &adt.into(),
                                    prefer_no_std,
                                    prefer_prelude,
                                )
                            }) {
                                Some(path) => path.display(sema_scope.db.upcast()).to_string(),
                                None => self_ty.display(db).to_string(),
                            }
                        }
                    };
                    let fn_name = func.name(db).display(db.upcast()).to_string();
                    Ok(format!("{container_name}::{fn_name}({args})"))
                }
                // Free function: render as `path::to::func(args)`.
                None => {
                    let fn_name = mod_item_path_str(sema_scope, &ModuleDef::Function(*func))?;
                    Ok(format!("{fn_name}({args})"))
                }
            }
        }
        Expr::Method { func, target, params, .. } => {
            // A `Many` receiver cannot be called on; fall back to the placeholder text.
            if target.contains_many_in_illegal_pos() {
                return Ok(many_formatter(&target.ty(db)));
            }

            let func_name = func.name(db).display(db.upcast()).to_string();
            let self_param = func.self_param(db).unwrap();
            let target = target.gen_source_code(
                sema_scope,
                many_formatter,
                prefer_no_std,
                prefer_prelude,
            )?;
            let args = params
                .iter()
                .map(|f| {
                    f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude)
                })
                .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
                .into_iter()
                .join(", ");

            match func.as_assoc_item(db).and_then(|it| it.container_or_implemented_trait(db)) {
                // Trait method: use fully-qualified call syntax with the receiver
                // adjusted (`&`/`&mut`/move) to the self-param's access mode.
                Some(trait_) => {
                    let trait_name = mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?;
                    let target = match self_param.access(db) {
                        crate::Access::Shared => format!("&{target}"),
                        crate::Access::Exclusive => format!("&mut {target}"),
                        crate::Access::Owned => target,
                    };
                    let res = match args.is_empty() {
                        true => format!("{trait_name}::{func_name}({target})",),
                        false => format!("{trait_name}::{func_name}({target}, {args})",),
                    };
                    Ok(res)
                }
                // Inherent method: plain dot-call.
                None => Ok(format!("{target}.{func_name}({args})")),
            }
        }
        Expr::Variant { variant, generics, params } => {
            // Only spell out generics the user actually needs (non-default ones).
            let generics = non_default_generics(db, (*variant).into(), generics);
            let generics_str = match generics.is_empty() {
                true => String::new(),
                false => {
                    let generics = generics
                        .iter()
                        .map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude))
                        .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
                        .into_iter()
                        .join(", ");
                    format!("::<{generics}>")
                }
            };
            let inner = match variant.kind(db) {
                StructKind::Tuple => {
                    let args = params
                        .iter()
                        .map(|f| {
                            f.gen_source_code(
                                sema_scope,
                                many_formatter,
                                prefer_no_std,
                                prefer_prelude,
                            )
                        })
                        .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
                        .into_iter()
                        .join(", ");
                    format!("{generics_str}({args})")
                }
                StructKind::Record => {
                    let fields = variant.fields(db);
                    let args = params
                        .iter()
                        .zip(fields.iter())
                        .map(|(a, f)| {
                            let tmp = format!(
                                "{}: {}",
                                f.name(db).display(db.upcast()),
                                a.gen_source_code(
                                    sema_scope,
                                    many_formatter,
                                    prefer_no_std,
                                    prefer_prelude
                                )?
                            );
                            Ok(tmp)
                        })
                        .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
                        .into_iter()
                        .join(", ");
                    format!("{generics_str}{{ {args} }}")
                }
                StructKind::Unit => generics_str,
            };

            let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant))?;
            Ok(format!("{prefix}{inner}"))
        }
        Expr::Struct { strukt, generics, params } => {
            let generics = non_default_generics(db, (*strukt).into(), generics);
            let inner = match strukt.kind(db) {
                StructKind::Tuple => {
                    let args = params
                        .iter()
                        .map(|a| {
                            a.gen_source_code(
                                sema_scope,
                                many_formatter,
                                prefer_no_std,
                                prefer_prelude,
                            )
                        })
                        .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
                        .into_iter()
                        .join(", ");
                    format!("({args})")
                }
                StructKind::Record => {
                    let fields = strukt.fields(db);
                    let args = params
                        .iter()
                        .zip(fields.iter())
                        .map(|(a, f)| {
                            let tmp = format!(
                                "{}: {}",
                                f.name(db).display(db.upcast()),
                                a.gen_source_code(
                                    sema_scope,
                                    many_formatter,
                                    prefer_no_std,
                                    prefer_prelude
                                )?
                            );
                            Ok(tmp)
                        })
                        .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
                        .into_iter()
                        .join(", ");
                    format!(" {{ {args} }}")
                }
                // Unit structs carry the turbofish only when generics are explicit.
                StructKind::Unit => match generics.is_empty() {
                    true => String::new(),
                    false => {
                        let generics = generics
                            .iter()
                            .map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude))
                            .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
                            .into_iter()
                            .join(", ");
                        format!("::<{generics}>")
                    }
                },
            };

            let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)))?;
            Ok(format!("{prefix}{inner}"))
        }
        Expr::Field { expr, field } => {
            if expr.contains_many_in_illegal_pos() {
                return Ok(many_formatter(&expr.ty(db)));
            }

            let strukt = expr.gen_source_code(
                sema_scope,
                many_formatter,
                prefer_no_std,
                prefer_prelude,
            )?;
            let field = field.name(db).display(db.upcast()).to_string();
            Ok(format!("{strukt}.{field}"))
        }
        Expr::Reference(expr) => {
            if expr.contains_many_in_illegal_pos() {
                return Ok(many_formatter(&expr.ty(db)));
            }

            let inner = expr.gen_source_code(
                sema_scope,
                many_formatter,
                prefer_no_std,
                prefer_prelude,
            )?;
            Ok(format!("&{inner}"))
        }
        // `Many` renders via the caller-supplied placeholder formatter.
        Expr::Many(ty) => Ok(many_formatter(ty)),
    }
}
/// Returns the type produced by this expression tree.
///
/// Equivalent to querying the type of the root node.
pub fn ty(&self, db: &dyn HirDatabase) -> Type {
    match self {
        // Leaf nodes that already carry their type.
        Expr::FamousType { ty, .. } | Expr::Many(ty) => ty.clone(),
        Expr::Const(it) => it.ty(db),
        Expr::Static(it) => it.ty(db),
        Expr::Local(it) => it.ty(db),
        Expr::ConstParam(it) => it.ty(db),
        // A reference node has the type of the expression it wraps.
        Expr::Reference(inner) => inner.ty(db),
        Expr::Function { func, generics, .. } => {
            func.ret_type_with_args(db, generics.iter().cloned())
        }
        Expr::Method { func, generics, target, .. } => {
            // Method return type depends on the receiver's type arguments
            // as well as the explicitly supplied generics.
            let args = target.ty(db).type_arguments().chain(generics.iter().cloned());
            func.ret_type_with_args(db, args)
        }
        Expr::Variant { variant, generics, .. } => {
            Adt::from(variant.parent_enum(db)).ty_with_args(db, generics.iter().cloned())
        }
        Expr::Struct { strukt, generics, .. } => {
            Adt::from(*strukt).ty_with_args(db, generics.iter().cloned())
        }
        Expr::Field { expr, field } => field.ty_with_args(db, expr.ty(db).type_arguments()),
    }
}
/// Lists the traits this expression tree relies on.
///
/// Only method calls can pull in a trait; their parameters are searched
/// recursively for further method calls.
pub fn traits_used(&self, db: &dyn HirDatabase) -> Vec<Trait> {
    let Expr::Method { func, params, .. } = self else {
        return Vec::new();
    };
    // Traits needed by the argument expressions come first, then the trait
    // (if any) that the called function belongs to.
    let mut traits: Vec<Trait> =
        params.iter().flat_map(|param| param.traits_used(db)).collect();
    if let Some(trait_) =
        func.as_assoc_item(db).and_then(|it| it.container_or_implemented_trait(db))
    {
        traits.push(trait_);
    }
    traits
}
/// Checks whether the tree contains an `Expr::Many` node in a position where
/// inserting a `todo`, `unimplemented` or similar macro would be illegal.
///
/// Some examples are following
/// ```no_compile
/// macro!().foo
/// macro!().bar()
/// &macro!()
/// ```
fn contains_many_in_illegal_pos(&self) -> bool {
    match self {
        Expr::Many(_) => true,
        // A reference is only illegal when it wraps `Many` directly.
        Expr::Reference(inner) => inner.is_many(),
        // Receiver of a method call or a field access must not contain `Many`.
        Expr::Method { target: inner, .. } | Expr::Field { expr: inner, .. } => {
            inner.contains_many_in_illegal_pos()
        }
        _ => false,
    }
}
/// Helper function to check if the outermost node of the type tree is the
/// `Expr::Many` variant.
///
/// Unlike `contains_many_in_illegal_pos`, this only inspects the root node,
/// not the subtrees.
pub fn is_many(&self) -> bool {
    matches!(self, Expr::Many(_))
}
}

View file

@ -0,0 +1,859 @@
//! Tactics for term search
//!
//! All the tactics take following arguments
//! * `ctx` - Context for the term search
//! * `defs` - Set of items in scope at term search target location
//! * `lookup` - Lookup table for types
//! And they return iterator that yields type trees that unify with the `goal` type.
use std::iter;
use hir_ty::db::HirDatabase;
use hir_ty::mir::BorrowKind;
use hir_ty::TyBuilder;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use crate::{
Adt, AssocItem, Enum, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type,
TypeParam, Variant,
};
use crate::term_search::{Expr, TermSearchConfig};
use super::{LookupTable, NewTypesKey, TermSearchCtx};
/// # Trivial tactic
///
/// Attempts to fulfill the goal by trying items in scope.
/// Also works as a starting point to move all items in scope to the lookup table.
///
/// # Arguments
/// * `ctx` - Context for the term search
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
///
/// Returns iterator that yields elements that unify with `goal`.
///
/// _Note that there is no use of calling this tactic in every iteration as the output does not
/// depend on the current state of `lookup`_
pub(super) fn trivial<'a, DB: HirDatabase>(
    ctx: &'a TermSearchCtx<'a, DB>,
    defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
) -> impl Iterator<Item = Expr> + 'a {
    let db = ctx.sema.db;
    defs.iter().filter_map(|def| {
        // Only consts, statics, const generic params and locals can be used
        // verbatim as expressions; everything else is ignored here.
        let expr = match def {
            ScopeDef::ModuleDef(ModuleDef::Const(it)) => Some(Expr::Const(*it)),
            ScopeDef::ModuleDef(ModuleDef::Static(it)) => Some(Expr::Static(*it)),
            ScopeDef::GenericParam(GenericParam::ConstParam(it)) => Some(Expr::ConstParam(*it)),
            ScopeDef::Local(it) => {
                if ctx.config.enable_borrowcheck {
                    let borrowck = db.borrowck(it.parent).ok()?;
                    // Reject locals that were (partially) moved out of, or that
                    // participate in a non-shared borrow: reusing them would not
                    // pass borrow checking.
                    let invalid = borrowck.iter().any(|b| {
                        b.partially_moved.iter().any(|moved| {
                            Some(&moved.local) == b.mir_body.binding_locals.get(it.binding_id)
                        }) || b.borrow_regions.iter().any(|region| {
                            // Shared borrows are fine
                            Some(&region.local) == b.mir_body.binding_locals.get(it.binding_id)
                                && region.kind != BorrowKind::Shared
                        })
                    });

                    if invalid {
                        return None;
                    }
                }

                Some(Expr::Local(*it))
            }
            _ => None,
        }?;

        // Each scope item is tried at most once, so mark it exhausted up front.
        lookup.mark_exhausted(*def);

        // Record the type even when it does not match the goal: later tactics
        // can still consume these expressions as parameters.
        let ty = expr.ty(db);
        lookup.insert(ty.clone(), std::iter::once(expr.clone()));

        // Don't suggest local references as they are not valid for return
        if matches!(expr, Expr::Local(_)) && ty.contains_reference(db) {
            return None;
        }

        ty.could_unify_with_deeply(db, &ctx.goal).then_some(expr)
    })
}
/// # Type constructor tactic
///
/// Attempts different type constructors for enums and structs in scope.
///
/// Updates lookup by new types reached and returns iterator that yields
/// elements that unify with `goal`.
///
/// # Arguments
/// * `ctx` - Context for the term search
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
pub(super) fn type_constructor<'a, DB: HirDatabase>(
    ctx: &'a TermSearchCtx<'a, DB>,
    defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
) -> impl Iterator<Item = Expr> + 'a {
    let db = ctx.sema.db;
    let module = ctx.scope.module();

    // Helper: attempt to construct `variant` with every viable combination of
    // generic arguments and field values from `lookup`. Returns the enum type
    // together with the expressions that produce it.
    fn variant_helper(
        db: &dyn HirDatabase,
        lookup: &mut LookupTable,
        parent_enum: Enum,
        variant: Variant,
        goal: &Type,
        config: &TermSearchConfig,
    ) -> Vec<(Type, Vec<Expr>)> {
        // Ignore unstable
        if variant.is_unstable(db) {
            return Vec::new();
        }

        let generics = GenericDef::from(variant.parent_enum(db));
        let Some(type_params) = generics
            .type_or_const_params(db)
            .into_iter()
            .map(|it| it.as_type_param(db))
            .collect::<Option<Vec<TypeParam>>>()
        else {
            // Ignore enums with const generics
            return Vec::new();
        };

        // We currently do not check lifetime bounds so ignore all types that have something to do
        // with them
        if !generics.lifetime_params(db).is_empty() {
            return Vec::new();
        }

        // Only account for stable type parameters for now, unstable params can be default
        // tho, for example in `Box<T, #[unstable] A: Allocator>`
        if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) {
            return Vec::new();
        }

        let non_default_type_params_len =
            type_params.iter().filter(|it| it.default(db).is_none()).count();

        let generic_params = lookup
            .iter_types()
            .collect::<Vec<_>>() // Force take ownership
            .into_iter()
            .permutations(non_default_type_params_len);

        generic_params
            .filter_map(move |generics| {
                // Insert default type params
                let mut g = generics.into_iter();
                let generics: Vec<_> = type_params
                    .iter()
                    .map(|it| it.default(db).unwrap_or_else(|| g.next().expect("No generic")))
                    .collect();

                let enum_ty = Adt::from(parent_enum).ty_with_args(db, generics.iter().cloned());

                // Allow types with generics only if they take us straight to goal for
                // performance reasons
                if !generics.is_empty() && !enum_ty.could_unify_with_deeply(db, goal) {
                    return None;
                }

                // Ignore types that have something to do with lifetimes
                if config.enable_borrowcheck && enum_ty.contains_reference(db) {
                    return None;
                }

                // Early exit if some param cannot be filled from lookup
                let param_exprs: Vec<Vec<Expr>> = variant
                    .fields(db)
                    .into_iter()
                    .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())))
                    .collect::<Option<_>>()?;

                // Note that we need special case for 0 param constructors because of multi cartesian
                // product
                let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
                    vec![Expr::Variant { variant, generics: generics.clone(), params: Vec::new() }]
                } else {
                    param_exprs
                        .into_iter()
                        .multi_cartesian_product()
                        .map(|params| Expr::Variant { variant, generics: generics.clone(), params })
                        .collect()
                };
                lookup.insert(enum_ty.clone(), variant_exprs.iter().cloned());

                Some((enum_ty, variant_exprs))
            })
            .collect()
    }

    defs.iter()
        .filter_map(move |def| match def {
            ScopeDef::ModuleDef(ModuleDef::Variant(it)) => {
                let variant_exprs =
                    variant_helper(db, lookup, it.parent_enum(db), *it, &ctx.goal, &ctx.config);
                if variant_exprs.is_empty() {
                    return None;
                }
                lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it)));
                Some(variant_exprs)
            }
            ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => {
                // For a whole enum in scope, try every one of its variants.
                let exprs: Vec<(Type, Vec<Expr>)> = enum_
                    .variants(db)
                    .into_iter()
                    .flat_map(|it| variant_helper(db, lookup, *enum_, it, &ctx.goal, &ctx.config))
                    .collect();

                if !exprs.is_empty() {
                    lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_))));
                }

                Some(exprs)
            }
            ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => {
                // Ignore unstable and not visible
                if it.is_unstable(db) || !it.is_visible_from(db, module) {
                    return None;
                }

                let generics = GenericDef::from(*it);

                // Ignore const params for now
                let type_params = generics
                    .type_or_const_params(db)
                    .into_iter()
                    .map(|it| it.as_type_param(db))
                    .collect::<Option<Vec<TypeParam>>>()?;

                // We currently do not check lifetime bounds so ignore all types that have something to do
                // with them
                if !generics.lifetime_params(db).is_empty() {
                    return None;
                }

                // Only account for stable type parameters for now, unstable params can be default
                // tho, for example in `Box<T, #[unstable] A: Allocator>`
                if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) {
                    return None;
                }

                let non_default_type_params_len =
                    type_params.iter().filter(|it| it.default(db).is_none()).count();

                let generic_params = lookup
                    .iter_types()
                    .collect::<Vec<_>>() // Force take ownership
                    .into_iter()
                    .permutations(non_default_type_params_len);

                let exprs = generic_params
                    .filter_map(|generics| {
                        // Insert default type params
                        let mut g = generics.into_iter();
                        let generics: Vec<_> = type_params
                            .iter()
                            .map(|it| {
                                it.default(db)
                                    .unwrap_or_else(|| g.next().expect("Missing type param"))
                            })
                            .collect();

                        let struct_ty = Adt::from(*it).ty_with_args(db, generics.iter().cloned());

                        // Allow types with generics only if they take us straight to goal for
                        // performance reasons. (Fix: the unification result must be negated
                        // here, mirroring `variant_helper`; previously instantiations that
                        // reached the goal were the ones rejected.)
                        if non_default_type_params_len != 0
                            && !struct_ty.could_unify_with_deeply(db, &ctx.goal)
                        {
                            return None;
                        }

                        // Ignore types that have something to do with lifetimes
                        if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) {
                            return None;
                        }

                        let fields = it.fields(db);
                        // Check if all fields are visible, otherwise we cannot fill them
                        if fields.iter().any(|it| !it.is_visible_from(db, module)) {
                            return None;
                        }

                        // Early exit if some param cannot be filled from lookup
                        let param_exprs: Vec<Vec<Expr>> = fields
                            .into_iter()
                            .map(|field| lookup.find(db, &field.ty(db)))
                            .collect::<Option<_>>()?;

                        // Note that we need special case for 0 param constructors because of multi cartesian
                        // product
                        let struct_exprs: Vec<Expr> = if param_exprs.is_empty() {
                            vec![Expr::Struct { strukt: *it, generics, params: Vec::new() }]
                        } else {
                            param_exprs
                                .into_iter()
                                .multi_cartesian_product()
                                .map(|params| Expr::Struct {
                                    strukt: *it,
                                    generics: generics.clone(),
                                    params,
                                })
                                .collect()
                        };

                        lookup
                            .mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(*it))));
                        lookup.insert(struct_ty.clone(), struct_exprs.iter().cloned());

                        Some((struct_ty, struct_exprs))
                    })
                    .collect();
                Some(exprs)
            }
            _ => None,
        })
        .flatten()
        .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
        .flatten()
}
/// # Free function tactic
///
/// Attempts to call different functions in scope with parameters from lookup table.
/// Functions that include generics are not used for performance reasons.
///
/// Updates lookup by new types reached and returns iterator that yields
/// elements that unify with `goal`.
///
/// # Arguments
/// * `ctx` - Context for the term search
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
pub(super) fn free_function<'a, DB: HirDatabase>(
    ctx: &'a TermSearchCtx<'a, DB>,
    defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
) -> impl Iterator<Item = Expr> + 'a {
    let db = ctx.sema.db;
    let module = ctx.scope.module();
    defs.iter()
        .filter_map(move |def| match def {
            ScopeDef::ModuleDef(ModuleDef::Function(it)) => {
                let generics = GenericDef::from(*it);

                // Ignore const params for now
                let type_params = generics
                    .type_or_const_params(db)
                    .into_iter()
                    .map(|it| it.as_type_param(db))
                    .collect::<Option<Vec<TypeParam>>>()?;

                // Ignore lifetimes as we do not check them
                if !generics.lifetime_params(db).is_empty() {
                    return None;
                }
                // Only account for stable type parameters for now, unstable params can be default
                // tho, for example in `Box<T, #[unstable] A: Allocator>`
                if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) {
                    return None;
                }
                let non_default_type_params_len =
                    type_params.iter().filter(|it| it.default(db).is_none()).count();

                // Ignore bigger number of generics for now as they kill the performance
                if non_default_type_params_len > 0 {
                    return None;
                }

                // With the check above this is always `permutations(0)`, i.e. a
                // single empty permutation, but kept general for future tuning.
                let generic_params = lookup
                    .iter_types()
                    .collect::<Vec<_>>() // Force take ownership
                    .into_iter()
                    .permutations(non_default_type_params_len);

                let exprs: Vec<_> = generic_params
                    .filter_map(|generics| {
                        // Insert default type params
                        let mut g = generics.into_iter();
                        let generics: Vec<_> = type_params
                            .iter()
                            .map(|it| match it.default(db) {
                                Some(ty) => Some(ty),
                                None => {
                                    let generic = g.next().expect("Missing type param");
                                    // Filter out generics that do not unify due to trait bounds
                                    it.ty(db).could_unify_with(db, &generic).then_some(generic)
                                }
                            })
                            .collect::<Option<_>>()?;

                        let ret_ty = it.ret_type_with_args(db, generics.iter().cloned());
                        // Filter out private and unsafe functions
                        if !it.is_visible_from(db, module)
                            || it.is_unsafe_to_call(db)
                            || it.is_unstable(db)
                            || ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
                            || ret_ty.is_raw_ptr()
                        {
                            return None;
                        }

                        // Early exit if some param cannot be filled from lookup
                        let param_exprs: Vec<Vec<Expr>> = it
                            .params_without_self_with_args(db, generics.iter().cloned())
                            .into_iter()
                            .map(|field| {
                                let ty = field.ty();
                                // Skip functions taking `&mut` parameters entirely;
                                // other parameters may be satisfied by autoref.
                                match ty.is_mutable_reference() {
                                    true => None,
                                    false => lookup.find_autoref(db, ty),
                                }
                            })
                            .collect::<Option<_>>()?;

                        // Note that we need special case for 0 param constructors because of multi cartesian
                        // product
                        let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
                            vec![Expr::Function { func: *it, generics, params: Vec::new() }]
                        } else {
                            param_exprs
                                .into_iter()
                                .multi_cartesian_product()
                                .map(|params| Expr::Function {
                                    func: *it,
                                    generics: generics.clone(),
                                    params,
                                })
                                .collect()
                        };

                        lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it)));
                        lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
                        Some((ret_ty, fn_exprs))
                    })
                    .collect();
                Some(exprs)
            }
            _ => None,
        })
        .flatten()
        .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
        .flatten()
}
/// # Impl method tactic
///
/// Attempts to call methods on types from lookup table.
/// This includes both functions from direct impl blocks as well as functions from traits.
/// Methods defined in impl blocks that are generic and methods that themselves have
/// generics are ignored for performance reasons.
///
/// Updates lookup by new types reached and returns iterator that yields
/// elements that unify with `goal`.
///
/// # Arguments
/// * `ctx` - Context for the term search
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
pub(super) fn impl_method<'a, DB: HirDatabase>(
    ctx: &'a TermSearchCtx<'a, DB>,
    _defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
) -> impl Iterator<Item = Expr> + 'a {
    let db = ctx.sema.db;
    let module = ctx.scope.module();
    lookup
        .new_types(NewTypesKey::ImplMethod)
        .into_iter()
        // Pair every newly reached type with each of its impl blocks...
        .flat_map(|ty| {
            Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
        })
        // ...and each impl block with all of its associated items.
        .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item)))
        // Only functions are callable; skip consts and type aliases.
        .filter_map(|(imp, ty, it)| match it {
            AssocItem::Function(f) => Some((imp, ty, f)),
            _ => None,
        })
        .filter_map(move |(imp, ty, it)| {
            let fn_generics = GenericDef::from(it);
            let imp_generics = GenericDef::from(imp);

            // Ignore const params for now
            let imp_type_params = imp_generics
                .type_or_const_params(db)
                .into_iter()
                .map(|it| it.as_type_param(db))
                .collect::<Option<Vec<TypeParam>>>()?;

            // Ignore const params for now
            let fn_type_params = fn_generics
                .type_or_const_params(db)
                .into_iter()
                .map(|it| it.as_type_param(db))
                .collect::<Option<Vec<TypeParam>>>()?;

            // Ignore all functions that have something to do with lifetimes as we don't check them
            if !fn_generics.lifetime_params(db).is_empty() {
                return None;
            }

            // Ignore functions without self param
            if !it.has_self_param(db) {
                return None;
            }

            // Filter out private and unsafe functions
            if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) {
                return None;
            }

            // Only account for stable type parameters for now, unstable params can be default
            // tho, for example in `Box<T, #[unstable] A: Allocator>`
            if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
                || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
            {
                return None;
            }

            let non_default_type_params_len = imp_type_params
                .iter()
                .chain(fn_type_params.iter())
                .filter(|it| it.default(db).is_none())
                .count();

            // Ignore bigger number of generics for now as they kill the performance
            if non_default_type_params_len > 0 {
                return None;
            }

            let generic_params = lookup
                .iter_types()
                .collect::<Vec<_>>() // Force take ownership
                .into_iter()
                .permutations(non_default_type_params_len);

            let exprs: Vec<_> = generic_params
                .filter_map(|generics| {
                    // Insert default type params
                    let mut g = generics.into_iter();
                    let generics: Vec<_> = imp_type_params
                        .iter()
                        .chain(fn_type_params.iter())
                        .map(|it| match it.default(db) {
                            Some(ty) => Some(ty),
                            None => {
                                let generic = g.next().expect("Missing type param");
                                // Filter out generics that do not unify due to trait bounds
                                it.ty(db).could_unify_with(db, &generic).then_some(generic)
                            }
                        })
                        .collect::<Option<_>>()?;

                    let ret_ty = it.ret_type_with_args(
                        db,
                        ty.type_arguments().chain(generics.iter().cloned()),
                    );
                    // Filter out functions that return references
                    if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
                        || ret_ty.is_raw_ptr()
                    {
                        return None;
                    }

                    // Ignore functions that do not change the type
                    if ty.could_unify_with_deeply(db, &ret_ty) {
                        return None;
                    }

                    // `expect` is safe: functions without a self param were
                    // filtered out above.
                    let self_ty = it
                        .self_param(db)
                        .expect("No self param")
                        .ty_with_args(db, ty.type_arguments().chain(generics.iter().cloned()));

                    // Ignore functions that have different self type
                    if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) {
                        return None;
                    }

                    // `expect` is safe: `ty` came out of the lookup table above.
                    let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup");

                    // Early exit if some param cannot be filled from lookup
                    let param_exprs: Vec<Vec<Expr>> = it
                        .params_without_self_with_args(
                            db,
                            ty.type_arguments().chain(generics.iter().cloned()),
                        )
                        .into_iter()
                        .map(|field| lookup.find_autoref(db, field.ty()))
                        .collect::<Option<_>>()?;

                    // Every combination of receiver expression and parameter
                    // expressions yields a distinct method-call candidate; the
                    // first element of each product is the receiver.
                    let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs)
                        .chain(param_exprs)
                        .multi_cartesian_product()
                        .map(|params| {
                            let mut params = params.into_iter();
                            let target = Box::new(params.next().unwrap());
                            Expr::Method {
                                func: it,
                                generics: generics.clone(),
                                target,
                                params: params.collect(),
                            }
                        })
                        .collect();

                    lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
                    Some((ret_ty, fn_exprs))
                })
                .collect();
            Some(exprs)
        })
        .flatten()
        .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
        .flatten()
}
/// # Struct projection tactic
///
/// Attempts different struct fields (`foo.bar.baz`)
///
/// Updates lookup by new types reached and returns iterator that yields
/// elements that unify with `goal`.
///
/// # Arguments
/// * `ctx` - Context for the term search
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
pub(super) fn struct_projection<'a, DB: HirDatabase>(
    ctx: &'a TermSearchCtx<'a, DB>,
    _defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
) -> impl Iterator<Item = Expr> + 'a {
    let db = ctx.sema.db;
    let module = ctx.scope.module();
    lookup
        .new_types(NewTypesKey::StructProjection)
        .into_iter()
        // `expect` is safe: every type handed out by the lookup table has at
        // least one expression recorded for it.
        .map(|ty| (ty.clone(), lookup.find(db, &ty).expect("Expr not in lookup")))
        .flat_map(move |(ty, targets)| {
            // Project every visible field of `ty` through each expression that
            // produces `ty`.
            ty.fields(db).into_iter().filter_map(move |(field, field_ty)| {
                if !field.is_visible_from(db, module) {
                    return None;
                }
                let exprs = targets
                    .clone()
                    .into_iter()
                    .map(move |target| Expr::Field { field, expr: Box::new(target) });
                Some((field_ty, exprs))
            })
        })
        .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
        .flatten()
}
/// # Famous types tactic
///
/// Tries a handful of well-known literal values: `true`, `false` and `()`.
///
/// Every candidate is recorded in the lookup table so that later tactics can
/// use it, and only the ones that unify with `goal` are yielded.
///
/// _Note that there is no point of calling it iteratively as the output is always the same_
///
/// # Arguments
/// * `ctx` - Context for the term search
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
pub(super) fn famous_types<'a, DB: HirDatabase>(
    ctx: &'a TermSearchCtx<'a, DB>,
    _defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
) -> impl Iterator<Item = Expr> + 'a {
    let db = ctx.sema.db;
    let module = ctx.scope.module();
    let candidates = [
        Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "true" },
        Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "false" },
        Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" },
    ];
    candidates
        .into_iter()
        // Register each candidate in the lookup table as a side effect.
        .inspect(|expr| lookup.insert(expr.ty(db), std::iter::once(expr.clone())))
        .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal))
}
/// # Impl static method (without self type) tactic
///
/// Attempts different functions from impl blocks that take no self parameter.
///
/// Updates lookup by new types reached and returns iterator that yields
/// elements that unify with `goal`.
///
/// # Arguments
/// * `ctx` - Context for the term search
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
pub(super) fn impl_static_method<'a, DB: HirDatabase>(
    ctx: &'a TermSearchCtx<'a, DB>,
    _defs: &'a FxHashSet<ScopeDef>,
    lookup: &'a mut LookupTable,
) -> impl Iterator<Item = Expr> + 'a {
    let db = ctx.sema.db;
    let module = ctx.scope.module();
    lookup
        .take_types_wishlist()
        .into_iter()
        // The goal type itself is always a candidate receiver type.
        .chain(iter::once(ctx.goal.clone()))
        .flat_map(|ty| {
            Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
        })
        .filter(|(_, imp)| !imp.is_unsafe(db))
        .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item)))
        // Only functions are callable; skip consts and type aliases.
        .filter_map(|(imp, ty, it)| match it {
            AssocItem::Function(f) => Some((imp, ty, f)),
            _ => None,
        })
        .filter_map(move |(imp, ty, it)| {
            let fn_generics = GenericDef::from(it);
            let imp_generics = GenericDef::from(imp);

            // Ignore const params for now
            let imp_type_params = imp_generics
                .type_or_const_params(db)
                .into_iter()
                .map(|it| it.as_type_param(db))
                .collect::<Option<Vec<TypeParam>>>()?;

            // Ignore const params for now
            let fn_type_params = fn_generics
                .type_or_const_params(db)
                .into_iter()
                .map(|it| it.as_type_param(db))
                .collect::<Option<Vec<TypeParam>>>()?;

            // Ignore all functions that have something to do with lifetimes as we don't check them
            if !fn_generics.lifetime_params(db).is_empty()
                || !imp_generics.lifetime_params(db).is_empty()
            {
                return None;
            }

            // Ignore functions with self param
            if it.has_self_param(db) {
                return None;
            }

            // Filter out private and unsafe functions
            if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) {
                return None;
            }

            // Only account for stable type parameters for now, unstable params can be default
            // tho, for example in `Box<T, #[unstable] A: Allocator>`
            if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
                || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
            {
                return None;
            }

            let non_default_type_params_len = imp_type_params
                .iter()
                .chain(fn_type_params.iter())
                .filter(|it| it.default(db).is_none())
                .count();

            // Ignore bigger number of generics for now as they kill the performance
            // (one non-default generic is allowed here, unlike the other function tactics).
            if non_default_type_params_len > 1 {
                return None;
            }

            let generic_params = lookup
                .iter_types()
                .collect::<Vec<_>>() // Force take ownership
                .into_iter()
                .permutations(non_default_type_params_len);

            let exprs: Vec<_> = generic_params
                .filter_map(|generics| {
                    // Insert default type params
                    let mut g = generics.into_iter();
                    let generics: Vec<_> = imp_type_params
                        .iter()
                        .chain(fn_type_params.iter())
                        .map(|it| match it.default(db) {
                            Some(ty) => Some(ty),
                            None => {
                                let generic = g.next().expect("Missing type param");
                                // Filter out generics that do not unify due to trait bounds.
                                // (A previous free-standing `trait_bounds`/`impls_trait`
                                // check whose result was discarded has been removed;
                                // it had no effect.)
                                it.ty(db).could_unify_with(db, &generic).then_some(generic)
                            }
                        })
                        .collect::<Option<_>>()?;

                    let ret_ty = it.ret_type_with_args(
                        db,
                        ty.type_arguments().chain(generics.iter().cloned()),
                    );
                    // Filter out functions that return references
                    if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
                        || ret_ty.is_raw_ptr()
                    {
                        return None;
                    }

                    // NOTE: unlike `impl_method`, functions whose return type unifies
                    // with the receiver type are intentionally not filtered out here.

                    // Early exit if some param cannot be filled from lookup
                    let param_exprs: Vec<Vec<Expr>> = it
                        .params_without_self_with_args(
                            db,
                            ty.type_arguments().chain(generics.iter().cloned()),
                        )
                        .into_iter()
                        .map(|field| lookup.find_autoref(db, field.ty()))
                        .collect::<Option<_>>()?;

                    // Note that we need special case for 0 param constructors because of multi cartesian
                    // product
                    let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
                        vec![Expr::Function { func: it, generics, params: Vec::new() }]
                    } else {
                        param_exprs
                            .into_iter()
                            .multi_cartesian_product()
                            .map(|params| Expr::Function {
                                func: it,
                                generics: generics.clone(),
                                params,
                            })
                            .collect()
                    };

                    lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
                    Some((ret_ty, fn_exprs))
                })
                .collect();
            Some(exprs)
        })
        .flatten()
        .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
        .flatten()
}

View file

@ -79,7 +79,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
edit.edit_file(target_file); edit.edit_file(target_file);
let vis_owner = edit.make_mut(vis_owner); let vis_owner = edit.make_mut(vis_owner);
vis_owner.set_visibility(missing_visibility.clone_for_update()); vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) { if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) {
edit.add_tabstop_before(cap, vis); edit.add_tabstop_before(cap, vis);
@ -131,7 +131,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
edit.edit_file(target_file); edit.edit_file(target_file);
let vis_owner = edit.make_mut(vis_owner); let vis_owner = edit.make_mut(vis_owner);
vis_owner.set_visibility(missing_visibility.clone_for_update()); vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) { if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) {
edit.add_tabstop_before(cap, vis); edit.add_tabstop_before(cap, vis);

View file

@ -1,4 +1,4 @@
use hir::{self, HasCrate, HasVisibility}; use hir::{HasCrate, HasVisibility};
use ide_db::{path_transform::PathTransform, FxHashSet}; use ide_db::{path_transform::PathTransform, FxHashSet};
use syntax::{ use syntax::{
ast::{ ast::{

View file

@ -1,8 +1,13 @@
use crate::assist_context::{AssistContext, Assists}; use crate::assist_context::{AssistContext, Assists};
use ide_db::assists::AssistId; use ide_db::assists::AssistId;
use syntax::{ use syntax::{
ast::{self, edit::IndentLevel, make, HasGenericParams, HasVisibility}, ast::{
ted, AstNode, SyntaxKind, self,
edit_in_place::{HasVisibilityEdit, Indent},
make, HasGenericParams, HasName,
},
ted::{self, Position},
AstNode, SyntaxKind, T,
}; };
// NOTES : // NOTES :
@ -44,7 +49,7 @@ use syntax::{
// }; // };
// } // }
// //
// trait ${0:TraitName}<const N: usize> { // trait ${0:NewTrait}<const N: usize> {
// // Used as an associated constant. // // Used as an associated constant.
// const CONST_ASSOC: usize = N * 4; // const CONST_ASSOC: usize = N * 4;
// //
@ -53,7 +58,7 @@ use syntax::{
// const_maker! {i32, 7} // const_maker! {i32, 7}
// } // }
// //
// impl<const N: usize> ${0:TraitName}<N> for Foo<N> { // impl<const N: usize> ${0:NewTrait}<N> for Foo<N> {
// // Used as an associated constant. // // Used as an associated constant.
// const CONST_ASSOC: usize = N * 4; // const CONST_ASSOC: usize = N * 4;
// //
@ -94,8 +99,10 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
"Generate trait from impl", "Generate trait from impl",
impl_ast.syntax().text_range(), impl_ast.syntax().text_range(),
|builder| { |builder| {
let impl_ast = builder.make_mut(impl_ast);
let trait_items = assoc_items.clone_for_update(); let trait_items = assoc_items.clone_for_update();
let impl_items = assoc_items.clone_for_update(); let impl_items = builder.make_mut(assoc_items);
let impl_name = builder.make_mut(impl_name);
trait_items.assoc_items().for_each(|item| { trait_items.assoc_items().for_each(|item| {
strip_body(&item); strip_body(&item);
@ -112,46 +119,42 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
impl_ast.generic_param_list(), impl_ast.generic_param_list(),
impl_ast.where_clause(), impl_ast.where_clause(),
trait_items, trait_items,
); )
.clone_for_update();
let trait_name = trait_ast.name().expect("new trait should have a name");
let trait_name_ref = make::name_ref(&trait_name.to_string()).clone_for_update();
// Change `impl Foo` to `impl NewTrait for Foo` // Change `impl Foo` to `impl NewTrait for Foo`
let arg_list = if let Some(genpars) = impl_ast.generic_param_list() { let mut elements = vec![
genpars.to_generic_args().to_string() trait_name_ref.syntax().clone().into(),
} else { make::tokens::single_space().into(),
"".to_owned() make::token(T![for]).into(),
}; ];
if let Some(snippet_cap) = ctx.config.snippet_cap { if let Some(params) = impl_ast.generic_param_list() {
builder.replace_snippet( let gen_args = &params.to_generic_args().clone_for_update();
snippet_cap, elements.insert(1, gen_args.syntax().clone().into());
impl_name.syntax().text_range(),
format!("${{0:TraitName}}{} for {}", arg_list, impl_name),
);
// Insert trait before TraitImpl
builder.insert_snippet(
snippet_cap,
impl_ast.syntax().text_range().start(),
format!(
"{}\n\n{}",
trait_ast.to_string().replace("NewTrait", "${0:TraitName}"),
IndentLevel::from_node(impl_ast.syntax())
),
);
} else {
builder.replace(
impl_name.syntax().text_range(),
format!("NewTrait{} for {}", arg_list, impl_name),
);
// Insert trait before TraitImpl
builder.insert(
impl_ast.syntax().text_range().start(),
format!("{}\n\n{}", trait_ast, IndentLevel::from_node(impl_ast.syntax())),
);
} }
builder.replace(assoc_items.syntax().text_range(), impl_items.to_string()); ted::insert_all(Position::before(impl_name.syntax()), elements);
// Insert trait before TraitImpl
ted::insert_all_raw(
Position::before(impl_ast.syntax()),
vec![
trait_ast.syntax().clone().into(),
make::tokens::whitespace(&format!("\n\n{}", impl_ast.indent_level())).into(),
],
);
// Link the trait name & trait ref names together as a placeholder snippet group
if let Some(cap) = ctx.config.snippet_cap {
builder.add_placeholder_snippet_group(
cap,
vec![trait_name.syntax().clone(), trait_name_ref.syntax().clone()],
);
}
}, },
); );
@ -160,23 +163,8 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
/// `E0449` Trait items always share the visibility of their trait /// `E0449` Trait items always share the visibility of their trait
fn remove_items_visibility(item: &ast::AssocItem) { fn remove_items_visibility(item: &ast::AssocItem) {
match item { if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) {
ast::AssocItem::Const(c) => { has_vis.set_visibility(None);
if let Some(vis) = c.visibility() {
ted::remove(vis.syntax());
}
}
ast::AssocItem::Fn(f) => {
if let Some(vis) = f.visibility() {
ted::remove(vis.syntax());
}
}
ast::AssocItem::TypeAlias(t) => {
if let Some(vis) = t.visibility() {
ted::remove(vis.syntax());
}
}
_ => (),
} }
} }
@ -404,12 +392,12 @@ impl<const N: usize> F$0oo<N> {
r#" r#"
struct Foo<const N: usize>([i32; N]); struct Foo<const N: usize>([i32; N]);
trait ${0:TraitName}<const N: usize> { trait ${0:NewTrait}<const N: usize> {
// Used as an associated constant. // Used as an associated constant.
const CONST: usize = N * 4; const CONST: usize = N * 4;
} }
impl<const N: usize> ${0:TraitName}<N> for Foo<N> { impl<const N: usize> ${0:NewTrait}<N> for Foo<N> {
// Used as an associated constant. // Used as an associated constant.
const CONST: usize = N * 4; const CONST: usize = N * 4;
} }

View file

@ -0,0 +1,253 @@
//! Term search assist
use hir::term_search::TermSearchCtx;
use ide_db::{
assists::{AssistId, AssistKind, GroupLabel},
famous_defs::FamousDefs,
};
use itertools::Itertools;
use syntax::{ast, AstNode};
use crate::assist_context::{AssistContext, Assists};
/// Assist: replace a `todo!()` / `unimplemented!()` invocation with an
/// expression found by term search that satisfies the expected type.
///
/// Returns `None` when the cursor is not on one of those macros or when the
/// search produces no candidate expressions.
pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    let macro_node = ctx.find_node_at_offset::<ast::MacroCall>()?;
    let macro_syntax = macro_node.syntax();
    let goal_range = macro_syntax.text_range();

    let parent_node = macro_syntax.parent()?;
    let scope = ctx.sema.scope(&parent_node)?;

    // Only offer the assist on `todo!` / `unimplemented!` invocations.
    let resolved_macro = ctx.sema.resolve_macro_call(&macro_node)?;
    let famous = FamousDefs(&ctx.sema, scope.krate());
    let todo_macro = famous.core_macros_todo()?;
    let unimplemented_macro = famous.core_macros_unimplemented()?;
    if resolved_macro != todo_macro && resolved_macro != unimplemented_macro {
        return None;
    }

    // The goal type is whatever the macro call's parent expression is expected to be.
    let goal_ty = ctx.sema.type_of_expr(&ast::Expr::cast(parent_node.clone())?)?.adjusted();

    let search_ctx = TermSearchCtx {
        sema: &ctx.sema,
        scope: &scope,
        goal: goal_ty,
        config: Default::default(),
    };
    let candidates = hir::term_search::term_search(&search_ctx);
    if candidates.is_empty() {
        return None;
    }

    // Unresolved holes inside generated code render as `todo!()` again.
    let mut formatter = |_: &hir::Type| String::from("todo!()");

    let rendered = candidates
        .into_iter()
        .filter_map(|candidate| {
            candidate
                .gen_source_code(
                    &scope,
                    &mut formatter,
                    ctx.config.prefer_no_std,
                    ctx.config.prefer_prelude,
                )
                .ok()
        })
        .unique();

    // One assist per distinct rendering, grouped under a single label.
    for code in rendered {
        acc.add_group(
            &GroupLabel(String::from("Term search")),
            AssistId("term_search", AssistKind::Generate),
            format!("Replace todo!() with {code}"),
            goal_range,
            |builder| {
                builder.replace(goal_range, code);
            },
        );
    }

    Some(())
}
#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_not_applicable};

    use super::*;

    #[test]
    fn test_complete_local() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
fn f() { let a: u128 = 1; let b: u128 = todo$0!() }"#,
            r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
        )
    }

    #[test]
    fn test_complete_todo_with_msg() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
            r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
        )
    }

    #[test]
    fn test_complete_unimplemented_with_msg() {
        // Fix: this test previously invoked `todo!`, making it an exact
        // duplicate of `test_complete_todo_with_msg`; it now actually
        // exercises `unimplemented!` with a message argument.
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
fn f() { let a: u128 = 1; let b: u128 = unimplemented$0!("asd") }"#,
            r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
        )
    }

    #[test]
    fn test_complete_unimplemented() {
        // Fix: use `unimplemented!` as the test name advertises (was `todo!`).
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
fn f() { let a: u128 = 1; let b: u128 = unimplemented$0!("asd") }"#,
            r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
        )
    }

    #[test]
    fn test_complete_struct_field() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
struct A { pub x: i32, y: bool }
fn f() { let a = A { x: 1, y: true }; let b: i32 = todo$0!(); }"#,
            r#"struct A { pub x: i32, y: bool }
fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#,
        )
    }

    #[test]
    fn test_enum_with_generics() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented, option
fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
            r#"fn f() { let a: i32 = 1; let b: Option<i32> = None; }"#,
        )
    }

    #[test]
    fn test_enum_with_generics2() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
enum Option<T> { None, Some(T) }
fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
            r#"enum Option<T> { None, Some(T) }
fn f() { let a: i32 = 1; let b: Option<i32> = Option::Some(a); }"#,
        )
    }

    #[test]
    fn test_enum_with_generics3() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
enum Option<T> { None, Some(T) }
fn f() { let a: Option<i32> = Option::None; let b: Option<Option<i32>> = todo$0!(); }"#,
            r#"enum Option<T> { None, Some(T) }
fn f() { let a: Option<i32> = Option::None; let b: Option<Option<i32>> = Option::Some(a); }"#,
        )
    }

    #[test]
    fn test_enum_with_generics4() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
enum Foo<T = i32> { Foo(T) }
fn f() { let a = 0; let b: Foo = todo$0!(); }"#,
            r#"enum Foo<T = i32> { Foo(T) }
fn f() { let a = 0; let b: Foo = Foo::Foo(a); }"#,
        );

        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
enum Foo<T = i32> { Foo(T) }
fn f() { let a: Foo<u32> = Foo::Foo(0); let b: Foo<u32> = todo$0!(); }"#,
            r#"enum Foo<T = i32> { Foo(T) }
fn f() { let a: Foo<u32> = Foo::Foo(0); let b: Foo<u32> = a; }"#,
        )
    }

    #[test]
    fn test_newtype() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
struct Foo(i32);
fn f() { let a: i32 = 1; let b: Foo = todo$0!(); }"#,
            r#"struct Foo(i32);
fn f() { let a: i32 = 1; let b: Foo = Foo(a); }"#,
        )
    }

    #[test]
    fn test_shadowing() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = todo$0!(); }"#,
            r#"fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = b; }"#,
        )
    }

    #[test]
    fn test_famous_bool() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
fn f() { let a: bool = todo$0!(); }"#,
            r#"fn f() { let a: bool = false; }"#,
        )
    }

    #[test]
    fn test_fn_with_reference_types() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
fn f(a: &i32) -> f32 { a as f32 }
fn g() { let a = 1; let b: f32 = todo$0!(); }"#,
            r#"fn f(a: &i32) -> f32 { a as f32 }
fn g() { let a = 1; let b: f32 = f(&a); }"#,
        )
    }

    #[test]
    fn test_fn_with_reference_types2() {
        check_assist(
            term_search,
            r#"//- minicore: todo, unimplemented
fn f(a: &i32) -> f32 { a as f32 }
fn g() { let a = &1; let b: f32 = todo$0!(); }"#,
            r#"fn f(a: &i32) -> f32 { a as f32 }
fn g() { let a = &1; let b: f32 = f(a); }"#,
        )
    }

    #[test]
    fn test_fn_with_reference_types3() {
        // `&mut 1` does not coerce to the `&i32` parameter under the search's
        // borrow-checking rules, so no assist is offered.
        check_assist_not_applicable(
            term_search,
            r#"//- minicore: todo, unimplemented
fn f(a: &i32) -> f32 { a as f32 }
fn g() { let a = &mut 1; let b: f32 = todo$0!(); }"#,
        )
    }
}

View file

@ -60,11 +60,6 @@
#![warn(rust_2018_idioms, unused_lifetimes)] #![warn(rust_2018_idioms, unused_lifetimes)]
#[allow(unused)]
macro_rules! eprintln {
($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
}
mod assist_config; mod assist_config;
mod assist_context; mod assist_context;
#[cfg(test)] #[cfg(test)]
@ -210,6 +205,7 @@ mod handlers {
mod replace_turbofish_with_explicit_type; mod replace_turbofish_with_explicit_type;
mod sort_items; mod sort_items;
mod split_import; mod split_import;
mod term_search;
mod toggle_ignore; mod toggle_ignore;
mod unmerge_match_arm; mod unmerge_match_arm;
mod unmerge_use; mod unmerge_use;
@ -332,6 +328,7 @@ mod handlers {
replace_arith_op::replace_arith_with_saturating, replace_arith_op::replace_arith_with_saturating,
sort_items::sort_items, sort_items::sort_items,
split_import::split_import, split_import::split_import,
term_search::term_search,
toggle_ignore::toggle_ignore, toggle_ignore::toggle_ignore,
unmerge_match_arm::unmerge_match_arm, unmerge_match_arm::unmerge_match_arm,
unmerge_use::unmerge_use, unmerge_use::unmerge_use,

View file

@ -1665,7 +1665,7 @@ macro_rules! const_maker {
}; };
} }
trait ${0:TraitName}<const N: usize> { trait ${0:NewTrait}<const N: usize> {
// Used as an associated constant. // Used as an associated constant.
const CONST_ASSOC: usize = N * 4; const CONST_ASSOC: usize = N * 4;
@ -1674,7 +1674,7 @@ trait ${0:TraitName}<const N: usize> {
const_maker! {i32, 7} const_maker! {i32, 7}
} }
impl<const N: usize> ${0:TraitName}<N> for Foo<N> { impl<const N: usize> ${0:NewTrait}<N> for Foo<N> {
// Used as an associated constant. // Used as an associated constant.
const CONST_ASSOC: usize = N * 4; const CONST_ASSOC: usize = N * 4;

View file

@ -40,7 +40,8 @@ use crate::{
literal::{render_struct_literal, render_variant_lit}, literal::{render_struct_literal, render_variant_lit},
macro_::render_macro, macro_::render_macro,
pattern::{render_struct_pat, render_variant_pat}, pattern::{render_struct_pat, render_variant_pat},
render_field, render_path_resolution, render_pattern_resolution, render_tuple_field, render_expr, render_field, render_path_resolution, render_pattern_resolution,
render_tuple_field,
type_alias::{render_type_alias, render_type_alias_with_eq}, type_alias::{render_type_alias, render_type_alias_with_eq},
union_literal::render_union_literal, union_literal::render_union_literal,
RenderContext, RenderContext,
@ -157,6 +158,12 @@ impl Completions {
item.add_to(self, ctx.db); item.add_to(self, ctx.db);
} }
/// Renders a term-search result as a completion item and, when rendering
/// succeeds, adds it to this completion list.
pub(crate) fn add_expr(&mut self, ctx: &CompletionContext<'_>, expr: &hir::term_search::Expr) {
    match render_expr(ctx, expr) {
        Some(rendered) => rendered.add_to(self, ctx.db),
        None => {}
    }
}
pub(crate) fn add_crate_roots( pub(crate) fn add_crate_roots(
&mut self, &mut self,
ctx: &CompletionContext<'_>, ctx: &CompletionContext<'_>,
@ -694,6 +701,7 @@ pub(super) fn complete_name_ref(
match &path_ctx.kind { match &path_ctx.kind {
PathKind::Expr { expr_ctx } => { PathKind::Expr { expr_ctx } => {
expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx); expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx);
expr::complete_expr(acc, ctx);
dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx); dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx);
item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx); item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx);

View file

@ -328,3 +328,59 @@ pub(crate) fn complete_expr_path(
} }
} }
} }
/// Completes whole expressions produced by term search when the expected type
/// at the cursor is known.
///
/// Does nothing when term search is disabled in the config, when a qualifier
/// is present, or when the expected type is unit/unknown.
pub(crate) fn complete_expr(acc: &mut Completions, ctx: &CompletionContext<'_>) {
    let _p = tracing::span!(tracing::Level::INFO, "complete_expr").entered();

    if !ctx.config.enable_term_search {
        return;
    }
    if !ctx.qualifier_ctx.none() {
        return;
    }

    let Some(expected_ty) = &ctx.expected_type else {
        return;
    };
    // Unit and unknown goals yield nothing worth suggesting.
    if expected_ty.is_unit() || expected_ty.is_unknown() {
        return;
    }

    let search_ctx = hir::term_search::TermSearchCtx {
        sema: &ctx.sema,
        scope: &ctx.scope,
        goal: expected_ty.clone(),
        config: hir::term_search::TermSearchConfig {
            enable_borrowcheck: false,
            many_alternatives_threshold: 1,
            depth: 6,
        },
    };

    for found in hir::term_search::term_search(&search_ctx) {
        match found {
            // A method call whose receiver is a `Many` placeholder: re-run the
            // search for the receiver type and emit one completion per
            // concrete receiver found.
            hir::term_search::Expr::Method { func, generics, target, params }
                if target.is_many() =>
            {
                let receiver_goal = target.ty(ctx.db);
                let receiver_ctx =
                    hir::term_search::TermSearchCtx { goal: receiver_goal, ..search_ctx };
                for receiver in hir::term_search::term_search(&receiver_ctx) {
                    let call = hir::term_search::Expr::Method {
                        func,
                        generics: generics.clone(),
                        target: Box::new(receiver),
                        params: params.clone(),
                    };
                    acc.add_expr(ctx, &call)
                }
            }
            other => acc.add_expr(ctx, &other),
        }
    }
}

View file

@ -238,6 +238,8 @@ fn import_on_the_fly(
(PathKind::Type { location }, ItemInNs::Types(ty)) => { (PathKind::Type { location }, ItemInNs::Types(ty)) => {
if matches!(location, TypeLocation::TypeBound) { if matches!(location, TypeLocation::TypeBound) {
matches!(ty, ModuleDef::Trait(_)) matches!(ty, ModuleDef::Trait(_))
} else if matches!(location, TypeLocation::ImplTrait) {
matches!(ty, ModuleDef::Trait(_) | ModuleDef::Module(_))
} else { } else {
true true
} }

View file

@ -31,7 +31,7 @@
//! } //! }
//! ``` //! ```
use hir::{self, HasAttrs}; use hir::HasAttrs;
use ide_db::{ use ide_db::{
documentation::HasDocs, path_transform::PathTransform, documentation::HasDocs, path_transform::PathTransform,
syntax_helpers::insert_whitespace_into_node, traits::get_missing_assoc_items, SymbolKind, syntax_helpers::insert_whitespace_into_node, traits::get_missing_assoc_items, SymbolKind,

View file

@ -31,6 +31,11 @@ pub(crate) fn complete_type_path(
ScopeDef::ImplSelfType(_) => location.complete_self_type(), ScopeDef::ImplSelfType(_) => location.complete_self_type(),
// Don't suggest attribute macros and derives. // Don't suggest attribute macros and derives.
ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db), ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db),
ScopeDef::ModuleDef(Trait(_) | Module(_))
if matches!(location, TypeLocation::ImplTrait) =>
{
true
}
// Type things are fine // Type things are fine
ScopeDef::ModuleDef( ScopeDef::ModuleDef(
BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TraitAlias(_) | TypeAlias(_), BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TraitAlias(_) | TypeAlias(_),
@ -184,6 +189,21 @@ pub(crate) fn complete_type_path(
} }
} }
} }
TypeLocation::ImplTrait => {
acc.add_nameref_keywords_with_colon(ctx);
ctx.process_all_names(&mut |name, def, doc_aliases| {
let is_trait_or_module = matches!(
def,
ScopeDef::ModuleDef(
hir::ModuleDef::Module(_) | hir::ModuleDef::Trait(_)
)
);
if is_trait_or_module {
acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases);
}
});
return;
}
_ => {} _ => {}
}; };

View file

@ -14,6 +14,7 @@ pub struct CompletionConfig {
pub enable_imports_on_the_fly: bool, pub enable_imports_on_the_fly: bool,
pub enable_self_on_the_fly: bool, pub enable_self_on_the_fly: bool,
pub enable_private_editable: bool, pub enable_private_editable: bool,
pub enable_term_search: bool,
pub full_function_signatures: bool, pub full_function_signatures: bool,
pub callable: Option<CallableSnippets>, pub callable: Option<CallableSnippets>,
pub snippet_cap: Option<SnippetCap>, pub snippet_cap: Option<SnippetCap>,

View file

@ -202,6 +202,7 @@ impl TypeLocation {
} }
TypeLocation::AssocConstEq => false, TypeLocation::AssocConstEq => false,
TypeLocation::AssocTypeEq => true, TypeLocation::AssocTypeEq => true,
TypeLocation::ImplTrait => false,
_ => true, _ => true,
} }
} }
@ -716,7 +717,7 @@ impl<'a> CompletionContext<'a> {
let krate = scope.krate(); let krate = scope.krate();
let module = scope.module(); let module = scope.module();
let toolchain = db.crate_graph()[krate.into()].channel(); let toolchain = db.toolchain_channel(krate.into());
// `toolchain == None` means we're in some detached files. Since we have no information on // `toolchain == None` means we're in some detached files. Since we have no information on
// the toolchain being used, let's just allow unstable items to be listed. // the toolchain being used, let's just allow unstable items to be listed.
let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None); let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None);

View file

@ -166,6 +166,8 @@ pub struct CompletionRelevance {
pub postfix_match: Option<CompletionRelevancePostfixMatch>, pub postfix_match: Option<CompletionRelevancePostfixMatch>,
/// This is set for type inference results /// This is set for type inference results
pub is_definite: bool, pub is_definite: bool,
/// This is set for items that are function (associated or method)
pub function: Option<CompletionRelevanceFn>,
} }
#[derive(Debug, Clone, Copy, Eq, PartialEq)] #[derive(Debug, Clone, Copy, Eq, PartialEq)]
@ -207,6 +209,24 @@ pub enum CompletionRelevancePostfixMatch {
Exact, Exact,
} }
/// Function-specific relevance data attached to function/method completion
/// items; used by `CompletionRelevance::score` to rank constructors and
/// builders above other callables.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub struct CompletionRelevanceFn {
    // True when the function takes any parameters; per the snapshot tests,
    // a `&self` receiver alone already counts as a parameter.
    pub has_params: bool,
    // True when the function is a method, i.e. takes a `self` parameter.
    pub has_self_param: bool,
    // How the return type relates to the `Self` type of the impl/trait.
    pub return_type: CompletionRelevanceReturnType,
}

/// Classification of a function's return type relative to the `Self` type of
/// its impl/trait, from least to most constructor-like.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum CompletionRelevanceReturnType {
    /// No relation to `Self` (including unit returns).
    Other,
    /// Returns the Self type of the impl/trait
    DirectConstructor,
    /// Returns something that indirectly constructs the `Self` type of the impl/trait e.g. `Result<Self, ()>`, `Option<Self>`
    Constructor,
    /// Returns a possible builder for the type
    Builder,
}
impl CompletionRelevance { impl CompletionRelevance {
/// Provides a relevance score. Higher values are more relevant. /// Provides a relevance score. Higher values are more relevant.
/// ///
@ -231,6 +251,7 @@ impl CompletionRelevance {
postfix_match, postfix_match,
is_definite, is_definite,
is_item_from_notable_trait, is_item_from_notable_trait,
function,
} = self; } = self;
// lower rank private things // lower rank private things
@ -275,6 +296,33 @@ impl CompletionRelevance {
if is_definite { if is_definite {
score += 10; score += 10;
} }
score += function
.map(|asf| {
let mut fn_score = match asf.return_type {
CompletionRelevanceReturnType::DirectConstructor => 15,
CompletionRelevanceReturnType::Builder => 10,
CompletionRelevanceReturnType::Constructor => 5,
CompletionRelevanceReturnType::Other => 0,
};
// When a fn is bumped due to return type:
// Bump Constructor or Builder methods with no arguments,
// over those with self arguments
if fn_score > 0 {
if !asf.has_params {
// bump associated functions
fn_score += 1;
} else if asf.has_self_param {
// downgrade methods (below Constructor)
fn_score = 1;
}
}
fn_score
})
.unwrap_or_default();
score score
} }
@ -297,6 +345,7 @@ pub enum CompletionItemKind {
Method, Method,
Snippet, Snippet,
UnresolvedReference, UnresolvedReference,
Expression,
} }
impl_from!(SymbolKind for CompletionItemKind); impl_from!(SymbolKind for CompletionItemKind);
@ -341,6 +390,7 @@ impl CompletionItemKind {
CompletionItemKind::Method => "me", CompletionItemKind::Method => "me",
CompletionItemKind::Snippet => "sn", CompletionItemKind::Snippet => "sn",
CompletionItemKind::UnresolvedReference => "??", CompletionItemKind::UnresolvedReference => "??",
CompletionItemKind::Expression => "ex",
} }
} }
} }

View file

@ -17,7 +17,7 @@ use ide_db::{
imports::import_assets::LocatedImport, imports::import_assets::LocatedImport,
RootDatabase, SnippetCap, SymbolKind, RootDatabase, SnippetCap, SymbolKind,
}; };
use syntax::{format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange}; use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange};
use text_edit::TextEdit; use text_edit::TextEdit;
use crate::{ use crate::{
@ -272,6 +272,82 @@ pub(crate) fn render_resolution_with_import_pat(
Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution)) Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution))
} }
/// Renders a term-search expression as an `Expression` completion item.
///
/// The label shows holes as `...`; the inserted snippet turns each hole into a
/// numbered placeholder tabstop. Returns `None` when source generation fails
/// or snippets are unsupported by the client.
pub(crate) fn render_expr(
    ctx: &CompletionContext<'_>,
    expr: &hir::term_search::Expr,
) -> Option<Builder> {
    // snake_case name of the ADT behind `ty`, if there is one.
    let adt_name = |ty: &hir::Type| {
        ty.as_adt()
            .and_then(|adt| adt.name(ctx.db).as_text())
            .map(|name| stdx::to_lower_snake_case(name.as_str()))
    };

    // Holes in the snippet become numbered placeholders: `${1:foo}`, `${2:bar}`, ...
    let mut i = 1;
    let mut snippet_formatter = |ty: &hir::Type| {
        let arg_name = adt_name(ty).unwrap_or_else(|| String::from("_"));
        let placeholder = format!("${{{i}:{arg_name}}}");
        i += 1;
        placeholder
    };

    // Holes in the label are just elided.
    let mut label_formatter =
        |ty: &hir::Type| adt_name(ty).unwrap_or_else(|| String::from("..."));

    let label = expr
        .gen_source_code(
            &ctx.scope,
            &mut label_formatter,
            ctx.config.prefer_no_std,
            ctx.config.prefer_prelude,
        )
        .ok()?;

    // Replace the whole enclosing path when the cursor sits inside one;
    // otherwise just the token's parent node (or the raw source range).
    let source_range = match ctx.original_token.parent() {
        Some(node) => node
            .ancestors()
            .find_map(ast::Path::cast)
            .map_or_else(|| node.text_range(), |path| path.syntax().text_range()),
        None => ctx.source_range(),
    };

    let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label.clone());

    let snippet = format!(
        "{}$0",
        expr.gen_source_code(
            &ctx.scope,
            &mut snippet_formatter,
            ctx.config.prefer_no_std,
            ctx.config.prefer_prelude
        )
        .ok()?
    );
    let edit = TextEdit::replace(source_range, snippet);
    item.snippet_edit(ctx.config.snippet_cap?, edit);
    item.documentation(Documentation::new(String::from("Autogenerated expression by term search")));
    item.set_relevance(crate::CompletionRelevance {
        type_match: compute_type_match(ctx, &expr.ty(ctx.db)),
        ..Default::default()
    });

    // Traits the expression relies on must be imported at the cursor; skip
    // the ones we cannot find a use path for.
    for used_trait in expr.traits_used(ctx.db) {
        let import_target = hir::ItemInNs::from(hir::ModuleDef::from(used_trait));
        let Some(import_path) = ctx.module.find_use_path(
            ctx.db,
            import_target,
            ctx.config.prefer_no_std,
            ctx.config.prefer_prelude,
        ) else {
            continue;
        };

        item.add_import(LocatedImport::new(import_path, import_target, import_target));
    }

    Some(item)
}
fn scope_def_to_name( fn scope_def_to_name(
resolution: ScopeDef, resolution: ScopeDef,
ctx: &RenderContext<'_>, ctx: &RenderContext<'_>,
@ -599,6 +675,16 @@ mod tests {
expect.assert_debug_eq(&actual); expect.assert_debug_eq(&actual);
} }
// Snapshot-checks the `(detail, relevance.function)` pairs of all `Method`
// completions produced for the fixture.
#[track_caller]
fn check_function_relevance(ra_fixture: &str, expect: Expect) {
    let actual = do_completion(ra_fixture, CompletionItemKind::Method)
        .into_iter()
        .map(|item| (item.detail.unwrap_or_default(), item.relevance.function))
        .collect::<Vec<_>>();
    expect.assert_debug_eq(&actual);
}
#[track_caller] #[track_caller]
fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) { fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) {
let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None); let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
@ -961,6 +1047,7 @@ fn func(input: Struct) { }
st Self [type] st Self [type]
sp Self [type] sp Self [type]
st Struct [type] st Struct [type]
ex Struct [type]
lc self [local] lc self [local]
fn func() [] fn func() []
me self.test() [] me self.test() []
@ -985,6 +1072,9 @@ fn main() {
"#, "#,
expect![[r#" expect![[r#"
lc input [type+name+local] lc input [type+name+local]
ex input [type]
ex true [type]
ex false [type]
lc inputbad [local] lc inputbad [local]
fn main() [] fn main() []
fn test() [] fn test() []
@ -1174,6 +1264,7 @@ fn main() { let _: m::Spam = S$0 }
is_private_editable: false, is_private_editable: false,
postfix_match: None, postfix_match: None,
is_definite: false, is_definite: false,
function: None,
}, },
trigger_call_info: true, trigger_call_info: true,
}, },
@ -1201,6 +1292,7 @@ fn main() { let _: m::Spam = S$0 }
is_private_editable: false, is_private_editable: false,
postfix_match: None, postfix_match: None,
is_definite: false, is_definite: false,
function: None,
}, },
trigger_call_info: true, trigger_call_info: true,
}, },
@ -1280,6 +1372,7 @@ fn foo() { A { the$0 } }
is_private_editable: false, is_private_editable: false,
postfix_match: None, postfix_match: None,
is_definite: false, is_definite: false,
function: None,
}, },
}, },
] ]
@ -1313,6 +1406,26 @@ impl S {
documentation: Documentation( documentation: Documentation(
"Method docs", "Method docs",
), ),
relevance: CompletionRelevance {
exact_name_match: false,
type_match: None,
is_local: false,
is_item_from_trait: false,
is_item_from_notable_trait: false,
is_name_already_imported: false,
requires_import: false,
is_op_method: false,
is_private_editable: false,
postfix_match: None,
is_definite: false,
function: Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: Other,
},
),
},
}, },
CompletionItem { CompletionItem {
label: "foo", label: "foo",
@ -1418,6 +1531,26 @@ fn foo(s: S) { s.$0 }
kind: Method, kind: Method,
lookup: "the_method", lookup: "the_method",
detail: "fn(&self)", detail: "fn(&self)",
relevance: CompletionRelevance {
exact_name_match: false,
type_match: None,
is_local: false,
is_item_from_trait: false,
is_item_from_notable_trait: false,
is_name_already_imported: false,
requires_import: false,
is_op_method: false,
is_private_editable: false,
postfix_match: None,
is_definite: false,
function: Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: Other,
},
),
},
}, },
] ]
"#]], "#]],
@ -1665,6 +1798,10 @@ fn f() { A { bar: b$0 }; }
expect![[r#" expect![[r#"
fn bar() [type+name] fn bar() [type+name]
fn baz() [type] fn baz() [type]
ex baz() [type]
ex bar() [type]
ex A { bar: baz() }.bar [type]
ex A { bar: bar() }.bar [type]
st A [] st A []
fn f() [] fn f() []
"#]], "#]],
@ -1749,6 +1886,8 @@ fn main() {
lc s [type+name+local] lc s [type+name+local]
st S [type] st S [type]
st S [type] st S [type]
ex s [type]
ex S [type]
fn foo() [] fn foo() []
fn main() [] fn main() []
"#]], "#]],
@ -1766,6 +1905,8 @@ fn main() {
lc ssss [type+local] lc ssss [type+local]
st S [type] st S [type]
st S [type] st S [type]
ex ssss [type]
ex S [type]
fn foo() [] fn foo() []
fn main() [] fn main() []
"#]], "#]],
@ -1798,6 +1939,8 @@ fn main() {
} }
"#, "#,
expect![[r#" expect![[r#"
ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify]
ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify]
lc m [local] lc m [local]
lc t [local] lc t [local]
lc &t [type+local] lc &t [type+local]
@ -1846,6 +1989,8 @@ fn main() {
} }
"#, "#,
expect![[r#" expect![[r#"
ex core::ops::DerefMut::deref_mut(&mut T(S)) (use core::ops::DerefMut) [type_could_unify]
ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify]
lc m [local] lc m [local]
lc t [local] lc t [local]
lc &mut t [type+local] lc &mut t [type+local]
@ -1894,6 +2039,8 @@ fn bar(t: Foo) {}
ev Foo::A [type] ev Foo::A [type]
ev Foo::B [type] ev Foo::B [type]
en Foo [type] en Foo [type]
ex Foo::A [type]
ex Foo::B [type]
fn bar() [] fn bar() []
fn foo() [] fn foo() []
"#]], "#]],
@ -1947,6 +2094,8 @@ fn main() {
} }
"#, "#,
expect![[r#" expect![[r#"
ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify]
ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify]
st S [] st S []
st &S [type] st &S [type]
st S [] st S []
@ -2002,6 +2151,254 @@ fn main() {
); );
} }
#[test]
fn constructor_order_simple() {
    // Among associated functions of `Foo`, a direct `-> Foo` constructor
    // ranks above a wrapped `-> Option<Self>` constructor, which ranks above
    // a function returning an unrelated type.
    check_relevance(
        r#"
struct Foo;
struct Other;
struct Option<T>(T);
impl Foo {
fn fn_ctr() -> Foo { unimplemented!() }
fn fn_another(n: u32) -> Other { unimplemented!() }
fn fn_ctr_self() -> Option<Self> { unimplemented!() }
}
fn test() {
let a = Foo::$0;
}
"#,
        expect![[r#"
fn fn_ctr() [type_could_unify]
fn fn_ctr_self() [type_could_unify]
fn fn_another() [type_could_unify]
"#]],
    );
}
#[test]
fn constructor_order_kind() {
    // Snapshot of the `CompletionRelevanceFn` data computed for each method:
    // `-> Self` is DirectConstructor, `Option<Self>`/`Result<.., Self>` are
    // Constructor (regardless of `Self`'s position), everything else — Other.
    check_function_relevance(
        r#"
struct Foo;
struct Bar;
struct Option<T>(T);
enum Result<T, E> { Ok(T), Err(E) };
impl Foo {
fn fn_ctr(&self) -> Foo { unimplemented!() }
fn fn_ctr_with_args(&self, n: u32) -> Foo { unimplemented!() }
fn fn_another(&self, n: u32) -> Bar { unimplemented!() }
fn fn_ctr_wrapped(&self, ) -> Option<Self> { unimplemented!() }
fn fn_ctr_wrapped_2(&self, ) -> Result<Self, Bar> { unimplemented!() }
fn fn_ctr_wrapped_3(&self, ) -> Result<Bar, Self> { unimplemented!() } // Self is not the first type
fn fn_ctr_wrapped_with_args(&self, m: u32) -> Option<Self> { unimplemented!() }
fn fn_another_unit(&self) { unimplemented!() }
}
fn test() {
let a = self::Foo::$0;
}
"#,
        expect![[r#"
[
(
"fn(&self, u32) -> Bar",
Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: Other,
},
),
),
(
"fn(&self)",
Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: Other,
},
),
),
(
"fn(&self) -> Foo",
Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: DirectConstructor,
},
),
),
(
"fn(&self, u32) -> Foo",
Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: DirectConstructor,
},
),
),
(
"fn(&self) -> Option<Foo>",
Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: Constructor,
},
),
),
(
"fn(&self) -> Result<Foo, Bar>",
Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: Constructor,
},
),
),
(
"fn(&self) -> Result<Bar, Foo>",
Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: Constructor,
},
),
),
(
"fn(&self, u32) -> Option<Foo>",
Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: Constructor,
},
),
),
]
"#]],
    );
}
#[test]
fn constructor_order_relevance() {
    // End-to-end ordering check combining the function-kind bonus with the
    // no-args / has-self adjustments in `CompletionRelevance::score`.
    check_relevance(
        r#"
struct Foo;
struct FooBuilder;
struct Result<T>(T);
impl Foo {
fn fn_no_ret(&self) {}
fn fn_ctr_with_args(input: u32) -> Foo { unimplemented!() }
fn fn_direct_ctr() -> Self { unimplemented!() }
fn fn_ctr() -> Result<Self> { unimplemented!() }
fn fn_other() -> Result<u32> { unimplemented!() }
fn fn_builder() -> FooBuilder { unimplemented!() }
}
fn test() {
let a = self::Foo::$0;
}
"#,
        // preference:
        // Direct Constructor
        // Direct Constructor with args
        // Builder
        // Constructor
        // Others
        expect![[r#"
fn fn_direct_ctr() [type_could_unify]
fn fn_ctr_with_args() [type_could_unify]
fn fn_builder() [type_could_unify]
fn fn_ctr() [type_could_unify]
me fn_no_ret() [type_could_unify]
fn fn_other() [type_could_unify]
"#]],
    );
    //
}
#[test]
fn function_relevance_generic_1() {
    // Same constructor-first ordering, but on a generic impl resolved through
    // an explicit turbofish (`Foo::<u32>::`).
    check_relevance(
        r#"
struct Foo<T: Default>(T);
struct FooBuilder;
struct Option<T>(T);
enum Result<T, E>{Ok(T), Err(E)};
impl<T: Default> Foo<T> {
fn fn_returns_unit(&self) {}
fn fn_ctr_with_args(input: T) -> Foo<T> { unimplemented!() }
fn fn_direct_ctr() -> Self { unimplemented!() }
fn fn_ctr_wrapped() -> Option<Self> { unimplemented!() }
fn fn_ctr_wrapped_2() -> Result<Self, u32> { unimplemented!() }
fn fn_other() -> Option<u32> { unimplemented!() }
fn fn_builder() -> FooBuilder { unimplemented!() }
}
fn test() {
let a = self::Foo::<u32>::$0;
}
"#,
        expect![[r#"
fn fn_direct_ctr() [type_could_unify]
fn fn_ctr_with_args() [type_could_unify]
fn fn_builder() [type_could_unify]
fn fn_ctr_wrapped() [type_could_unify]
fn fn_ctr_wrapped_2() [type_could_unify]
me fn_returns_unit() [type_could_unify]
fn fn_other() [type_could_unify]
"#]],
    );
}
#[test]
fn function_relevance_generic_2() {
    // Generic 2
    // NOTE(review): the fixture annotates `a` with `Res<Foo<u32>>`, but `Res`
    // is not defined anywhere in it — presumably `Result` was intended. The
    // expected ordering below is asserted against this fixture as-is; confirm
    // before "fixing" the type name, since that would change the snapshot.
    check_relevance(
        r#"
struct Foo<T: Default>(T);
struct FooBuilder;
struct Option<T>(T);
enum Result<T, E>{Ok(T), Err(E)};
impl<T: Default> Foo<T> {
fn fn_no_ret(&self) {}
fn fn_ctr_with_args(input: T) -> Foo<T> { unimplemented!() }
fn fn_direct_ctr() -> Self { unimplemented!() }
fn fn_ctr() -> Option<Self> { unimplemented!() }
fn fn_ctr2() -> Result<Self, u32> { unimplemented!() }
fn fn_other() -> Option<u32> { unimplemented!() }
fn fn_builder() -> FooBuilder { unimplemented!() }
}
fn test() {
let a : Res<Foo<u32>> = Foo::$0;
}
"#,
        expect![[r#"
fn fn_direct_ctr() [type_could_unify]
fn fn_ctr_with_args() [type_could_unify]
fn fn_builder() [type_could_unify]
fn fn_ctr() [type_could_unify]
fn fn_ctr2() [type_could_unify]
me fn_no_ret() [type_could_unify]
fn fn_other() [type_could_unify]
"#]],
    );
}
#[test] #[test]
fn struct_field_method_ref() { fn struct_field_method_ref() {
check_kinds( check_kinds(
@ -2022,6 +2419,26 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
kind: Method, kind: Method,
lookup: "baz", lookup: "baz",
detail: "fn(&self) -> u32", detail: "fn(&self) -> u32",
relevance: CompletionRelevance {
exact_name_match: false,
type_match: None,
is_local: false,
is_item_from_trait: false,
is_item_from_notable_trait: false,
is_name_already_imported: false,
requires_import: false,
is_op_method: false,
is_private_editable: false,
postfix_match: None,
is_definite: false,
function: Some(
CompletionRelevanceFn {
has_params: true,
has_self_param: true,
return_type: Other,
},
),
},
ref_match: "&@107", ref_match: "&@107",
}, },
CompletionItem { CompletionItem {
@ -2096,6 +2513,7 @@ fn foo() {
is_private_editable: false, is_private_editable: false,
postfix_match: None, postfix_match: None,
is_definite: false, is_definite: false,
function: None,
}, },
}, },
] ]
@ -2133,6 +2551,26 @@ fn main() {
), ),
lookup: "foo", lookup: "foo",
detail: "fn() -> S", detail: "fn() -> S",
relevance: CompletionRelevance {
exact_name_match: false,
type_match: None,
is_local: false,
is_item_from_trait: false,
is_item_from_notable_trait: false,
is_name_already_imported: false,
requires_import: false,
is_op_method: false,
is_private_editable: false,
postfix_match: None,
is_definite: false,
function: Some(
CompletionRelevanceFn {
has_params: false,
has_self_param: false,
return_type: Other,
},
),
},
ref_match: "&@92", ref_match: "&@92",
}, },
] ]
@ -2160,6 +2598,7 @@ fn foo() {
"#, "#,
expect![[r#" expect![[r#"
lc foo [type+local] lc foo [type+local]
ex foo [type]
ev Foo::A() [type_could_unify] ev Foo::A() [type_could_unify]
ev Foo::B [type_could_unify] ev Foo::B [type_could_unify]
en Foo [type_could_unify] en Foo [type_could_unify]
@ -2493,6 +2932,7 @@ fn main() {
is_private_editable: false, is_private_editable: false,
postfix_match: None, postfix_match: None,
is_definite: false, is_definite: false,
function: None,
}, },
}, },
CompletionItem { CompletionItem {
@ -2515,6 +2955,7 @@ fn main() {
is_private_editable: false, is_private_editable: false,
postfix_match: None, postfix_match: None,
is_definite: false, is_definite: false,
function: None,
}, },
}, },
] ]

View file

@ -8,8 +8,13 @@ use syntax::{format_smolstr, AstNode, SmolStr};
use crate::{ use crate::{
context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind}, context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind},
item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance}, item::{
render::{compute_exact_name_match, compute_ref_match, compute_type_match, RenderContext}, Builder, CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevanceFn,
CompletionRelevanceReturnType,
},
render::{
compute_exact_name_match, compute_ref_match, compute_type_match, match_types, RenderContext,
},
CallableSnippets, CallableSnippets,
}; };
@ -61,9 +66,9 @@ fn render(
), ),
_ => (name.unescaped().to_smol_str(), name.to_smol_str()), _ => (name.unescaped().to_smol_str(), name.to_smol_str()),
}; };
let has_self_param = func.self_param(db).is_some();
let mut item = CompletionItem::new( let mut item = CompletionItem::new(
if func.self_param(db).is_some() { if has_self_param {
CompletionItemKind::Method CompletionItemKind::Method
} else { } else {
CompletionItemKind::SymbolKind(SymbolKind::Function) CompletionItemKind::SymbolKind(SymbolKind::Function)
@ -99,6 +104,15 @@ fn render(
.filter(|_| !has_call_parens) .filter(|_| !has_call_parens)
.and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?))); .and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?)));
let function = assoc_item
.and_then(|assoc_item| assoc_item.implementing_ty(db))
.map(|self_type| compute_return_type_match(db, &ctx, self_type, &ret_type))
.map(|return_type| CompletionRelevanceFn {
has_params: has_self_param || func.num_params(db) > 0,
has_self_param,
return_type,
});
item.set_relevance(CompletionRelevance { item.set_relevance(CompletionRelevance {
type_match: if has_call_parens || complete_call_parens.is_some() { type_match: if has_call_parens || complete_call_parens.is_some() {
compute_type_match(completion, &ret_type) compute_type_match(completion, &ret_type)
@ -106,6 +120,7 @@ fn render(
compute_type_match(completion, &func.ty(db)) compute_type_match(completion, &func.ty(db))
}, },
exact_name_match: compute_exact_name_match(completion, &call), exact_name_match: compute_exact_name_match(completion, &call),
function,
is_op_method, is_op_method,
is_item_from_notable_trait, is_item_from_notable_trait,
..ctx.completion_relevance() ..ctx.completion_relevance()
@ -156,6 +171,33 @@ fn render(
item item
} }
fn compute_return_type_match(
db: &dyn HirDatabase,
ctx: &RenderContext<'_>,
self_type: hir::Type,
ret_type: &hir::Type,
) -> CompletionRelevanceReturnType {
if match_types(ctx.completion, &self_type, ret_type).is_some() {
// fn([..]) -> Self
CompletionRelevanceReturnType::DirectConstructor
} else if ret_type
.type_arguments()
.any(|ret_type_arg| match_types(ctx.completion, &self_type, &ret_type_arg).is_some())
{
// fn([..]) -> Result<Self, E> OR Wrapped<Foo, Self>
CompletionRelevanceReturnType::Constructor
} else if ret_type
.as_adt()
.and_then(|adt| adt.name(db).as_str().map(|name| name.ends_with("Builder")))
.unwrap_or(false)
{
// fn([..]) -> [..]Builder
CompletionRelevanceReturnType::Builder
} else {
CompletionRelevanceReturnType::Other
}
}
pub(super) fn add_call_parens<'b>( pub(super) fn add_call_parens<'b>(
builder: &'b mut Builder, builder: &'b mut Builder,
ctx: &CompletionContext<'_>, ctx: &CompletionContext<'_>,

View file

@ -65,6 +65,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
enable_imports_on_the_fly: true, enable_imports_on_the_fly: true,
enable_self_on_the_fly: true, enable_self_on_the_fly: true,
enable_private_editable: false, enable_private_editable: false,
enable_term_search: true,
full_function_signatures: false, full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments), callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true), snippet_cap: SnippetCap::new(true),

View file

@ -97,6 +97,11 @@ fn func(param0 @ (param1, param2): (i32, i32)) {
kw unsafe kw unsafe
kw while kw while
kw while let kw while let
ex ifletlocal
ex letlocal
ex matcharm
ex param1
ex param2
"#]], "#]],
); );
} }
@ -241,6 +246,8 @@ fn complete_in_block() {
sn macro_rules sn macro_rules
sn pd sn pd
sn ppd sn ppd
ex false
ex true
"#]], "#]],
) )
} }
@ -542,7 +549,26 @@ fn quux(x: i32) {
m!(x$0 m!(x$0
} }
"#, "#,
expect![[r#""#]], expect![[r#"
fn quux() fn(i32)
lc x i32
lc y i32
ma m!() macro_rules! m
bt u32 u32
kw crate::
kw false
kw for
kw if
kw if let
kw loop
kw match
kw return
kw self::
kw true
kw unsafe
kw while
kw while let
"#]],
); );
} }
@ -682,7 +708,9 @@ fn main() {
} }
"#, "#,
expect![[r#" expect![[r#"
fn test() fn() -> Zulu fn test() fn() -> Zulu
ex Zulu
ex Zulu::test()
"#]], "#]],
); );
} }

View file

@ -1397,3 +1397,22 @@ pub use bridge2::server2::Span2;
"#]], "#]],
); );
} }
#[test]
fn flyimport_only_traits_in_impl_trait_block() {
check(
r#"
//- /main.rs crate:main deps:dep
pub struct Bar;
impl Foo$0 for Bar { }
//- /lib.rs crate:dep
pub trait FooTrait;
pub struct FooStruct;
"#,
expect![[r#"
tt FooTrait (use dep::FooTrait)
"#]],
);
}

View file

@ -192,6 +192,8 @@ fn main() {
bt u32 u32 bt u32 u32
kw crate:: kw crate::
kw self:: kw self::
ex Foo::default()
ex foo
"#]], "#]],
); );
check( check(

View file

@ -225,10 +225,10 @@ impl S {
fn foo() { let _ = lib::S::$0 } fn foo() { let _ = lib::S::$0 }
"#, "#,
expect![[r#" expect![[r#"
ct PUBLIC_CONST pub const PUBLIC_CONST: u32 ct PUBLIC_CONST pub const PUBLIC_CONST: u32
fn public_method() fn() fn public_method() fn()
ta PublicType pub type PublicType = u32 ta PublicType pub type PublicType = u32
"#]], "#]],
); );
} }
@ -242,8 +242,8 @@ impl U { fn m() { } }
fn foo() { let _ = U::$0 } fn foo() { let _ = U::$0 }
"#, "#,
expect![[r#" expect![[r#"
fn m() fn() fn m() fn()
"#]], "#]],
); );
} }
@ -256,8 +256,8 @@ trait Trait { fn m(); }
fn foo() { let _ = Trait::$0 } fn foo() { let _ = Trait::$0 }
"#, "#,
expect![[r#" expect![[r#"
fn m() (as Trait) fn() fn m() (as Trait) fn()
"#]], "#]],
); );
} }
@ -273,8 +273,8 @@ impl Trait for S {}
fn foo() { let _ = S::$0 } fn foo() { let _ = S::$0 }
"#, "#,
expect![[r#" expect![[r#"
fn m() (as Trait) fn() fn m() (as Trait) fn()
"#]], "#]],
); );
} }
@ -290,8 +290,8 @@ impl Trait for S {}
fn foo() { let _ = <S as Trait>::$0 } fn foo() { let _ = <S as Trait>::$0 }
"#, "#,
expect![[r#" expect![[r#"
fn m() (as Trait) fn() fn m() (as Trait) fn()
"#]], "#]],
); );
} }
@ -396,9 +396,9 @@ macro_rules! foo { () => {} }
fn main() { let _ = crate::$0 } fn main() { let _ = crate::$0 }
"#, "#,
expect![[r#" expect![[r#"
fn main() fn() fn main() fn()
ma foo!() macro_rules! foo ma foo!() macro_rules! foo
"#]], "#]],
); );
} }
@ -694,8 +694,10 @@ fn bar() -> Bar {
} }
"#, "#,
expect![[r#" expect![[r#"
fn foo() (as Foo) fn() -> Self fn foo() (as Foo) fn() -> Self
"#]], ex Bar
ex bar()
"#]],
); );
} }
@ -722,6 +724,8 @@ fn bar() -> Bar {
expect![[r#" expect![[r#"
fn bar() fn() fn bar() fn()
fn foo() (as Foo) fn() -> Self fn foo() (as Foo) fn() -> Self
ex Bar
ex bar()
"#]], "#]],
); );
} }
@ -748,6 +752,8 @@ fn bar() -> Bar {
"#, "#,
expect![[r#" expect![[r#"
fn foo() (as Foo) fn() -> Self fn foo() (as Foo) fn() -> Self
ex Bar
ex bar()
"#]], "#]],
); );
} }

View file

@ -989,3 +989,43 @@ fn foo<'a>() { S::<'static, F$0, _, _>; }
"#]], "#]],
); );
} }
#[test]
fn complete_traits_on_impl_trait_block() {
check(
r#"
trait Foo {}
struct Bar;
impl $0 for Bar { }
"#,
expect![[r#"
md module
tt Foo
tt Trait
kw crate::
kw self::
"#]],
);
}
#[test]
fn complete_traits_with_path_on_impl_trait_block() {
check(
r#"
mod outer {
pub trait Foo {}
pub struct Bar;
pub mod inner {
}
}
impl outer::$0 for Bar { }
"#,
expect![[r#"
md inner
tt Foo
"#]],
);
}

View file

@ -114,6 +114,14 @@ impl FamousDefs<'_, '_> {
self.find_function("core:mem:drop") self.find_function("core:mem:drop")
} }
pub fn core_macros_todo(&self) -> Option<Macro> {
self.find_macro("core:todo")
}
pub fn core_macros_unimplemented(&self) -> Option<Macro> {
self.find_macro("core:unimplemented")
}
pub fn builtin_crates(&self) -> impl Iterator<Item = Crate> { pub fn builtin_crates(&self) -> impl Iterator<Item = Crate> {
IntoIterator::into_iter([ IntoIterator::into_iter([
self.std(), self.std(),

View file

@ -148,7 +148,7 @@ impl<'a> PathTransform<'a> {
let mut defaulted_params: Vec<DefaultedParam> = Default::default(); let mut defaulted_params: Vec<DefaultedParam> = Default::default();
self.generic_def self.generic_def
.into_iter() .into_iter()
.flat_map(|it| it.type_params(db)) .flat_map(|it| it.type_or_const_params(db))
.skip(skip) .skip(skip)
// The actual list of trait type parameters may be longer than the one // The actual list of trait type parameters may be longer than the one
// used in the `impl` block due to trailing default type parameters. // used in the `impl` block due to trailing default type parameters.

View file

@ -71,7 +71,6 @@ impl Definition {
&self, &self,
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
new_name: &str, new_name: &str,
rename_external: bool,
) -> Result<SourceChange> { ) -> Result<SourceChange> {
// self.krate() returns None if // self.krate() returns None if
// self is a built-in attr, built-in type or tool module. // self is a built-in attr, built-in type or tool module.
@ -80,8 +79,8 @@ impl Definition {
if let Some(krate) = self.krate(sema.db) { if let Some(krate) = self.krate(sema.db) {
// Can we not rename non-local items? // Can we not rename non-local items?
// Then bail if non-local // Then bail if non-local
if !rename_external && !krate.origin(sema.db).is_local() { if !krate.origin(sema.db).is_local() {
bail!("Cannot rename a non-local definition as the config for it is disabled") bail!("Cannot rename a non-local definition")
} }
} }

View file

@ -138,7 +138,7 @@ impl SnippetEdit {
.into_iter() .into_iter()
.zip(1..) .zip(1..)
.with_position() .with_position()
.map(|pos| { .flat_map(|pos| {
let (snippet, index) = match pos { let (snippet, index) = match pos {
(itertools::Position::First, it) | (itertools::Position::Middle, it) => it, (itertools::Position::First, it) | (itertools::Position::Middle, it) => it,
// last/only snippet gets index 0 // last/only snippet gets index 0
@ -146,11 +146,13 @@ impl SnippetEdit {
| (itertools::Position::Only, (snippet, _)) => (snippet, 0), | (itertools::Position::Only, (snippet, _)) => (snippet, 0),
}; };
let range = match snippet { match snippet {
Snippet::Tabstop(pos) => TextRange::empty(pos), Snippet::Tabstop(pos) => vec![(index, TextRange::empty(pos))],
Snippet::Placeholder(range) => range, Snippet::Placeholder(range) => vec![(index, range)],
}; Snippet::PlaceholderGroup(ranges) => {
(index, range) ranges.into_iter().map(|range| (index, range)).collect()
}
}
}) })
.collect_vec(); .collect_vec();
@ -248,7 +250,7 @@ impl SourceChangeBuilder {
fn commit(&mut self) { fn commit(&mut self) {
let snippet_edit = self.snippet_builder.take().map(|builder| { let snippet_edit = self.snippet_builder.take().map(|builder| {
SnippetEdit::new( SnippetEdit::new(
builder.places.into_iter().map(PlaceSnippet::finalize_position).collect_vec(), builder.places.into_iter().flat_map(PlaceSnippet::finalize_position).collect(),
) )
}); });
@ -287,30 +289,10 @@ impl SourceChangeBuilder {
pub fn insert(&mut self, offset: TextSize, text: impl Into<String>) { pub fn insert(&mut self, offset: TextSize, text: impl Into<String>) {
self.edit.insert(offset, text.into()) self.edit.insert(offset, text.into())
} }
/// Append specified `snippet` at the given `offset`
pub fn insert_snippet(
&mut self,
_cap: SnippetCap,
offset: TextSize,
snippet: impl Into<String>,
) {
self.source_change.is_snippet = true;
self.insert(offset, snippet);
}
/// Replaces specified `range` of text with a given string. /// Replaces specified `range` of text with a given string.
pub fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) { pub fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) {
self.edit.replace(range, replace_with.into()) self.edit.replace(range, replace_with.into())
} }
/// Replaces specified `range` of text with a given `snippet`.
pub fn replace_snippet(
&mut self,
_cap: SnippetCap,
range: TextRange,
snippet: impl Into<String>,
) {
self.source_change.is_snippet = true;
self.replace(range, snippet);
}
pub fn replace_ast<N: AstNode>(&mut self, old: N, new: N) { pub fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
} }
@ -356,6 +338,17 @@ impl SourceChangeBuilder {
self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into())) self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into()))
} }
/// Adds a snippet to move the cursor selected over `nodes`
///
/// This allows for renaming newly generated items without having to go
/// through a separate rename step.
pub fn add_placeholder_snippet_group(&mut self, _cap: SnippetCap, nodes: Vec<SyntaxNode>) {
assert!(nodes.iter().all(|node| node.parent().is_some()));
self.add_snippet(PlaceSnippet::OverGroup(
nodes.into_iter().map(|node| node.into()).collect(),
))
}
fn add_snippet(&mut self, snippet: PlaceSnippet) { fn add_snippet(&mut self, snippet: PlaceSnippet) {
let snippet_builder = self.snippet_builder.get_or_insert(SnippetBuilder { places: vec![] }); let snippet_builder = self.snippet_builder.get_or_insert(SnippetBuilder { places: vec![] });
snippet_builder.places.push(snippet); snippet_builder.places.push(snippet);
@ -400,6 +393,13 @@ pub enum Snippet {
Tabstop(TextSize), Tabstop(TextSize),
/// A placeholder snippet (e.g. `${0:placeholder}`). /// A placeholder snippet (e.g. `${0:placeholder}`).
Placeholder(TextRange), Placeholder(TextRange),
/// A group of placeholder snippets, e.g.
///
/// ```no_run
/// let ${0:new_var} = 4;
/// fun(1, 2, 3, ${0:new_var});
/// ```
PlaceholderGroup(Vec<TextRange>),
} }
enum PlaceSnippet { enum PlaceSnippet {
@ -409,14 +409,20 @@ enum PlaceSnippet {
After(SyntaxElement), After(SyntaxElement),
/// Place a placeholder snippet in place of the element /// Place a placeholder snippet in place of the element
Over(SyntaxElement), Over(SyntaxElement),
/// Place a group of placeholder snippets which are linked together
/// in place of the elements
OverGroup(Vec<SyntaxElement>),
} }
impl PlaceSnippet { impl PlaceSnippet {
fn finalize_position(self) -> Snippet { fn finalize_position(self) -> Vec<Snippet> {
match self { match self {
PlaceSnippet::Before(it) => Snippet::Tabstop(it.text_range().start()), PlaceSnippet::Before(it) => vec![Snippet::Tabstop(it.text_range().start())],
PlaceSnippet::After(it) => Snippet::Tabstop(it.text_range().end()), PlaceSnippet::After(it) => vec![Snippet::Tabstop(it.text_range().end())],
PlaceSnippet::Over(it) => Snippet::Placeholder(it.text_range()), PlaceSnippet::Over(it) => vec![Snippet::Placeholder(it.text_range())],
PlaceSnippet::OverGroup(it) => {
vec![Snippet::PlaceholderGroup(it.into_iter().map(|it| it.text_range()).collect())]
}
} }
} }
} }

View file

@ -31,7 +31,7 @@ use base_db::{
salsa::{self, ParallelDatabase}, salsa::{self, ParallelDatabase},
SourceDatabaseExt, SourceRootId, Upcast, SourceDatabaseExt, SourceRootId, Upcast,
}; };
use fst::{self, raw::IndexedValue, Automaton, Streamer}; use fst::{raw::IndexedValue, Automaton, Streamer};
use hir::{ use hir::{
db::HirDatabase, db::HirDatabase,
import_map::{AssocSearchMode, SearchMode}, import_map::{AssocSearchMode, SearchMode},

View file

@ -329,6 +329,7 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
| ast::Expr::RecordExpr(_) | ast::Expr::RecordExpr(_)
| ast::Expr::RefExpr(_) | ast::Expr::RefExpr(_)
| ast::Expr::ReturnExpr(_) | ast::Expr::ReturnExpr(_)
| ast::Expr::BecomeExpr(_)
| ast::Expr::TryExpr(_) | ast::Expr::TryExpr(_)
| ast::Expr::TupleExpr(_) | ast::Expr::TupleExpr(_)
| ast::Expr::LetExpr(_) | ast::Expr::LetExpr(_)

View file

@ -43,7 +43,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option<Vec<Ass
let label = format!("Rename to {}", d.suggested_text); let label = format!("Rename to {}", d.suggested_text);
let mut res = unresolved_fix("change_case", &label, frange.range); let mut res = unresolved_fix("change_case", &label, frange.range);
if ctx.resolve.should_resolve(&res.id) { if ctx.resolve.should_resolve(&res.id) {
let source_change = def.rename(&ctx.sema, &d.suggested_text, true); let source_change = def.rename(&ctx.sema, &d.suggested_text);
res.source_change = Some(source_change.ok().unwrap_or_default()); res.source_change = Some(source_change.ok().unwrap_or_default());
} }

View file

@ -242,7 +242,7 @@ macro_rules! foo {
fn f() { fn f() {
foo!(); foo!();
//^^^ error: invalid macro definition: expected subtree //^^^ error: macro definition has parse errors
} }
"#, "#,

View file

@ -310,6 +310,24 @@ fn main() {
); );
} }
#[test]
fn mismatched_types_issue_15883() {
// Check we don't panic.
check_diagnostics_no_bails(
r#"
//- minicore: option
fn main() {
match Some((true, false)) {
Some(true) | Some(false) => {}
// ^^^^ error: expected (bool, bool), found bool
// ^^^^^ error: expected (bool, bool), found bool
None => {}
}
}
"#,
);
}
#[test] #[test]
fn mismatched_types_in_or_patterns() { fn mismatched_types_in_or_patterns() {
cov_mark::check_count!(validate_match_bailed_out, 2); cov_mark::check_count!(validate_match_bailed_out, 2);

View file

@ -182,6 +182,18 @@ fn foo() -> u8 {
); );
} }
#[test]
fn no_diagnostic_if_not_last_statement2() {
check_diagnostics(
r#"
fn foo() -> u8 {
return 2;
fn bar() {}
}
"#,
);
}
#[test] #[test]
fn replace_with_expr() { fn replace_with_expr() {
check_fix( check_fix(

View file

@ -112,7 +112,8 @@ fn add_missing_ok_or_some(
let variant_name = if Some(expected_enum) == core_result { "Ok" } else { "Some" }; let variant_name = if Some(expected_enum) == core_result { "Ok" } else { "Some" };
let wrapped_actual_ty = expected_adt.ty_with_args(ctx.sema.db, &[d.actual.clone()]); let wrapped_actual_ty =
expected_adt.ty_with_args(ctx.sema.db, std::iter::once(d.actual.clone()));
if !d.expected.could_unify_with(ctx.sema.db, &wrapped_actual_ty) { if !d.expected.could_unify_with(ctx.sema.db, &wrapped_actual_ty) {
return None; return None;

View file

@ -1,14 +1,20 @@
use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, StructKind}; use hir::{
db::ExpandDatabase,
term_search::{term_search, TermSearchCtx},
ClosureStyle, HirDisplay,
};
use ide_db::{ use ide_db::{
assists::{Assist, AssistId, AssistKind, GroupLabel}, assists::{Assist, AssistId, AssistKind, GroupLabel},
label::Label, label::Label,
source_change::SourceChange, source_change::SourceChange,
}; };
use syntax::AstNode; use itertools::Itertools;
use text_edit::TextEdit; use text_edit::TextEdit;
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
use syntax::AstNode;
// Diagnostic: typed-hole // Diagnostic: typed-hole
// //
// This diagnostic is triggered when an underscore expression is used in an invalid position. // This diagnostic is triggered when an underscore expression is used in an invalid position.
@ -36,50 +42,54 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>
let (original_range, _) = let (original_range, _) =
d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?; d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?;
let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?; let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?;
let mut assists = vec![];
scope.process_all_names(&mut |name, def| { let term_search_ctx = TermSearchCtx {
let ty = match def { sema: &ctx.sema,
hir::ScopeDef::ModuleDef(it) => match it { scope: &scope,
hir::ModuleDef::Function(it) => it.ty(db), goal: d.expected.clone(),
hir::ModuleDef::Adt(hir::Adt::Struct(it)) if it.kind(db) != StructKind::Record => { config: Default::default(),
it.constructor_ty(db) };
} let paths = term_search(&term_search_ctx);
hir::ModuleDef::Variant(it) if it.kind(db) != StructKind::Record => {
it.constructor_ty(db) let mut formatter = |_: &hir::Type| String::from("_");
}
hir::ModuleDef::Const(it) => it.ty(db), let assists: Vec<Assist> = paths
hir::ModuleDef::Static(it) => it.ty(db), .into_iter()
_ => return, .filter_map(|path| {
}, path.gen_source_code(
hir::ScopeDef::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db), &scope,
hir::ScopeDef::Local(it) => it.ty(db), &mut formatter,
_ => return, ctx.config.prefer_no_std,
}; ctx.config.prefer_prelude,
// FIXME: should also check coercions if it is at a coercion site )
if !ty.contains_unknown() && ty.could_unify_with(db, &d.expected) { .ok()
assists.push(Assist { })
id: AssistId("typed-hole", AssistKind::QuickFix), .unique()
label: Label::new(format!("Replace `_` with `{}`", name.display(db))), .map(|code| Assist {
group: Some(GroupLabel("Replace `_` with a matching entity in scope".to_owned())), id: AssistId("typed-hole", AssistKind::QuickFix),
target: original_range.range, label: Label::new(format!("Replace `_` with `{}`", &code)),
source_change: Some(SourceChange::from_text_edit( group: Some(GroupLabel("Replace `_` with a term".to_owned())),
original_range.file_id, target: original_range.range,
TextEdit::replace(original_range.range, name.display(db).to_string()), source_change: Some(SourceChange::from_text_edit(
)), original_range.file_id,
trigger_signature_help: false, TextEdit::replace(original_range.range, code),
}); )),
} trigger_signature_help: false,
}); })
if assists.is_empty() { .collect();
None
} else { if !assists.is_empty() {
Some(assists) Some(assists)
} else {
None
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::tests::{check_diagnostics, check_fixes}; use crate::tests::{
check_diagnostics, check_fixes_unordered, check_has_fix, check_has_single_fix,
};
#[test] #[test]
fn unknown() { fn unknown() {
@ -99,7 +109,7 @@ fn main() {
r#" r#"
fn main() { fn main() {
if _ {} if _ {}
//^ error: invalid `_` expression, expected type `bool` //^ 💡 error: invalid `_` expression, expected type `bool`
let _: fn() -> i32 = _; let _: fn() -> i32 = _;
//^ error: invalid `_` expression, expected type `fn() -> i32` //^ error: invalid `_` expression, expected type `fn() -> i32`
let _: fn() -> () = _; // FIXME: This should trigger an assist because `main` matches via *coercion* let _: fn() -> () = _; // FIXME: This should trigger an assist because `main` matches via *coercion*
@ -129,7 +139,7 @@ fn main() {
fn main() { fn main() {
let mut x = t(); let mut x = t();
x = _; x = _;
//^ 💡 error: invalid `_` expression, expected type `&str` //^ error: invalid `_` expression, expected type `&str`
x = ""; x = "";
} }
fn t<T>() -> T { loop {} } fn t<T>() -> T { loop {} }
@ -143,7 +153,8 @@ fn t<T>() -> T { loop {} }
r#" r#"
fn main() { fn main() {
let _x = [(); _]; let _x = [(); _];
let _y: [(); 10] = [(); _]; // FIXME: This should trigger error
// let _y: [(); 10] = [(); _];
_ = 0; _ = 0;
(_,) = (1,); (_,) = (1,);
} }
@ -153,7 +164,7 @@ fn main() {
#[test] #[test]
fn check_quick_fix() { fn check_quick_fix() {
check_fixes( check_fixes_unordered(
r#" r#"
enum Foo { enum Foo {
Bar Bar
@ -173,6 +184,18 @@ enum Foo {
} }
use Foo::Bar; use Foo::Bar;
const C: Foo = Foo::Bar; const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar;
let _: Foo = Bar;
//^ error: invalid `_` expression, expected type `fn()`
}
"#,
r#"
enum Foo {
Bar
}
use Foo::Bar;
const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) { fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar; let local = Foo::Bar;
let _: Foo = local; let _: Foo = local;
@ -209,18 +232,6 @@ enum Foo {
} }
use Foo::Bar; use Foo::Bar;
const C: Foo = Foo::Bar; const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar;
let _: Foo = Bar;
//^ error: invalid `_` expression, expected type `fn()`
}
"#,
r#"
enum Foo {
Bar
}
use Foo::Bar;
const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) { fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar; let local = Foo::Bar;
let _: Foo = C; let _: Foo = C;
@ -230,4 +241,153 @@ fn main<const CP: Foo>(param: Foo) {
], ],
); );
} }
#[test]
fn local_item_use_trait() {
check_has_fix(
r#"
struct Bar;
struct Baz;
trait Foo {
fn foo(self) -> Bar;
}
impl Foo for Baz {
fn foo(self) -> Bar {
unimplemented!()
}
}
fn asd() -> Bar {
let a = Baz;
_$0
}
"#,
r"
struct Bar;
struct Baz;
trait Foo {
fn foo(self) -> Bar;
}
impl Foo for Baz {
fn foo(self) -> Bar {
unimplemented!()
}
}
fn asd() -> Bar {
let a = Baz;
Foo::foo(a)
}
",
);
}
#[test]
fn init_struct() {
check_has_fix(
r#"struct Abc {}
struct Qwe { a: i32, b: Abc }
fn main() {
let a: i32 = 1;
let c: Qwe = _$0;
}"#,
r#"struct Abc {}
struct Qwe { a: i32, b: Abc }
fn main() {
let a: i32 = 1;
let c: Qwe = Qwe { a: a, b: Abc { } };
}"#,
);
}
#[test]
fn ignore_impl_func_with_incorrect_return() {
check_has_single_fix(
r#"
struct Bar {}
trait Foo {
type Res;
fn foo(&self) -> Self::Res;
}
impl Foo for i32 {
type Res = Self;
fn foo(&self) -> Self::Res { 1 }
}
fn main() {
let a: i32 = 1;
let c: Bar = _$0;
}"#,
r#"
struct Bar {}
trait Foo {
type Res;
fn foo(&self) -> Self::Res;
}
impl Foo for i32 {
type Res = Self;
fn foo(&self) -> Self::Res { 1 }
}
fn main() {
let a: i32 = 1;
let c: Bar = Bar { };
}"#,
);
}
#[test]
fn use_impl_func_with_correct_return() {
check_has_fix(
r#"
struct Bar {}
struct A;
trait Foo {
type Res;
fn foo(&self) -> Self::Res;
}
impl Foo for A {
type Res = Bar;
fn foo(&self) -> Self::Res { Bar { } }
}
fn main() {
let a = A;
let c: Bar = _$0;
}"#,
r#"
struct Bar {}
struct A;
trait Foo {
type Res;
fn foo(&self) -> Self::Res;
}
impl Foo for A {
type Res = Bar;
fn foo(&self) -> Self::Res { Bar { } }
}
fn main() {
let a = A;
let c: Bar = Foo::foo(&a);
}"#,
);
}
#[test]
fn local_shadow_fn() {
check_fixes_unordered(
r#"
fn f() {
let f: i32 = 0;
_$0
}"#,
vec![
r#"
fn f() {
let f: i32 = 0;
()
}"#,
r#"
fn f() {
let f: i32 = 0;
crate::f()
}"#,
],
);
}
} }

View file

@ -91,6 +91,91 @@ fn check_nth_fix_with_config(
assert_eq_text!(&after, &actual); assert_eq_text!(&after, &actual);
} }
pub(crate) fn check_fixes_unordered(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
for ra_fixture_after in ra_fixtures_after.iter() {
check_has_fix(ra_fixture_before, ra_fixture_after)
}
}
#[track_caller]
pub(crate) fn check_has_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
let after = trim_indent(ra_fixture_after);
let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
let mut conf = DiagnosticsConfig::test_sample();
conf.expr_fill_default = ExprFillDefaultMode::Default;
let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
.into_iter()
.find(|d| {
d.fixes
.as_ref()
.and_then(|fixes| {
fixes.iter().find(|fix| {
if !fix.target.contains_inclusive(file_position.offset) {
return false;
}
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap();
let mut actual = db.file_text(file_id).to_string();
for (edit, snippet_edit) in source_change.source_file_edits.values() {
edit.apply(&mut actual);
if let Some(snippet_edit) = snippet_edit {
snippet_edit.apply(&mut actual);
}
}
actual
};
after == actual
})
})
.is_some()
});
assert!(fix.is_some(), "no diagnostic with desired fix");
}
#[track_caller]
pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
let after = trim_indent(ra_fixture_after);
let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
let mut conf = DiagnosticsConfig::test_sample();
conf.expr_fill_default = ExprFillDefaultMode::Default;
let mut n_fixes = 0;
let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
.into_iter()
.find(|d| {
d.fixes
.as_ref()
.and_then(|fixes| {
n_fixes += fixes.len();
fixes.iter().find(|fix| {
if !fix.target.contains_inclusive(file_position.offset) {
return false;
}
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap();
let mut actual = db.file_text(file_id).to_string();
for (edit, snippet_edit) in source_change.source_file_edits.values() {
edit.apply(&mut actual);
if let Some(snippet_edit) = snippet_edit {
snippet_edit.apply(&mut actual);
}
}
actual
};
after == actual
})
})
.is_some()
});
assert!(fix.is_some(), "no diagnostic with desired fix");
assert!(n_fixes == 1, "Too many fixes suggested");
}
/// Checks that there's a diagnostic *without* fix at `$0`. /// Checks that there's a diagnostic *without* fix at `$0`.
pub(crate) fn check_no_fix(ra_fixture: &str) { pub(crate) fn check_no_fix(ra_fixture: &str) {
let (db, file_position) = RootDatabase::with_position(ra_fixture); let (db, file_position) = RootDatabase::with_position(ra_fixture);

View file

@ -501,7 +501,7 @@ fn get_doc_base_urls(
let Some(krate) = def.krate(db) else { return Default::default() }; let Some(krate) = def.krate(db) else { return Default::default() };
let Some(display_name) = krate.display_name(db) else { return Default::default() }; let Some(display_name) = krate.display_name(db) else { return Default::default() };
let crate_data = &db.crate_graph()[krate.into()]; let crate_data = &db.crate_graph()[krate.into()];
let channel = crate_data.channel().unwrap_or(ReleaseChannel::Nightly).as_str(); let channel = db.toolchain_channel(krate.into()).unwrap_or(ReleaseChannel::Nightly).as_str();
let (web_base, local_base) = match &crate_data.origin { let (web_base, local_base) = match &crate_data.origin {
// std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself. // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself.

View file

@ -7263,8 +7263,8 @@ impl Iterator for S {
file_id: FileId( file_id: FileId(
1, 1,
), ),
full_range: 6157..6365, full_range: 6290..6498,
focus_range: 6222..6228, focus_range: 6355..6361,
name: "Future", name: "Future",
kind: Trait, kind: Trait,
container_name: "future", container_name: "future",
@ -7277,8 +7277,8 @@ impl Iterator for S {
file_id: FileId( file_id: FileId(
1, 1,
), ),
full_range: 6995..7461, full_range: 7128..7594,
focus_range: 7039..7047, focus_range: 7172..7180,
name: "Iterator", name: "Iterator",
kind: Trait, kind: Trait,
container_name: "iterator", container_name: "iterator",

View file

@ -12,11 +12,6 @@
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![recursion_limit = "128"] #![recursion_limit = "128"]
#[allow(unused)]
macro_rules! eprintln {
($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
}
#[cfg(test)] #[cfg(test)]
mod fixture; mod fixture;
@ -258,11 +253,11 @@ impl Analysis {
Env::default(), Env::default(),
false, false,
CrateOrigin::Local { repo: None, name: None }, CrateOrigin::Local { repo: None, name: None },
Err("Analysis::from_single_file has no target layout".into()),
None,
); );
change.change_file(file_id, Some(Arc::from(text))); change.change_file(file_id, Some(Arc::from(text)));
change.set_crate_graph(crate_graph); change.set_crate_graph(crate_graph);
change.set_target_data_layouts(vec![Err("fixture has no layout".into())]);
change.set_toolchains(vec![None]);
host.apply_change(change); host.apply_change(change);
(host.analysis(), file_id) (host.analysis(), file_id)
} }
@ -680,9 +675,8 @@ impl Analysis {
&self, &self,
position: FilePosition, position: FilePosition,
new_name: &str, new_name: &str,
rename_external: bool,
) -> Cancellable<Result<SourceChange, RenameError>> { ) -> Cancellable<Result<SourceChange, RenameError>> {
self.with_db(|db| rename::rename(db, position, new_name, rename_external)) self.with_db(|db| rename::rename(db, position, new_name))
} }
pub fn prepare_rename( pub fn prepare_rename(

View file

@ -54,7 +54,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
} }
} }
/// Returns `Vec` for the same reason as `parent_module` /// This returns `Vec` because a module may be included from several places.
pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> { pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
db.relevant_crates(file_id) db.relevant_crates(file_id)
.iter() .iter()

Some files were not shown because too many files have changed in this diff Show more