mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-27 13:33:31 +00:00
Merge commit '37f84c101bca43b11027f30ab0c2852f9325bc3d' into sync-from-ra
This commit is contained in:
parent 6502421771
commit 4704881b64
311 changed files with 13700 additions and 9110 deletions
@@ -7,13 +7,10 @@ trim_trailing_whitespace = true
 end_of_line = lf
 insert_final_newline = true
 indent_style = space
 
 [*.{rs,toml}]
 indent_size = 4
 
-[*.ts]
-indent_size = 4
-[*.js]
-indent_size = 4
-[*.json]
-indent_size = 4
+[*.md]
+indent_size = 2
+
+[*.{yml, yaml}]
+indent_size = 2
2 .github/workflows/autopublish.yaml vendored

@@ -49,8 +49,8 @@ jobs:
           cargo workspaces rename --from project-model project_model
           cargo workspaces rename --from test-utils test_utils
           cargo workspaces rename --from text-edit text_edit
-          cargo workspaces rename ra_ap_%n
           # Remove library crates from the workspaces so we don't auto-publish them as well
           sed -i 's/ "lib\/\*",//' ./Cargo.toml
+          cargo workspaces rename ra_ap_%n
           find crates/rust-analyzer -type f -name '*.rs' -exec sed -i 's/rust_analyzer/ra_ap_rust_analyzer/g' {} +
           cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$(($RUN_NUMBER + 133))
127 .github/workflows/metrics.yaml vendored

@@ -11,20 +11,135 @@ env:
   RUSTUP_MAX_RETRIES: 10
 
 jobs:
-  metrics:
+  setup_cargo:
     if: github.repository == 'rust-lang/rust-analyzer'
     runs-on: ubuntu-latest
+    steps:
+      - name: Install Rust toolchain
+        run: |
+          rustup update --no-self-update stable
+          rustup component add rustfmt rust-src
+          rustup default stable
+      - name: Cache cargo
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+          key: ${{ runner.os }}-cargo-${{ github.sha }}
+
+  build_metrics:
+    runs-on: ubuntu-latest
+    needs: setup_cargo
 
     steps:
       - name: Checkout repository
         uses: actions/checkout@v3
 
-      - name: Install Rust toolchain
-        run: |
-          rustup update --no-self-update stable
-          rustup component add rustfmt rust-src
+      - name: Restore cargo cache
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+          key: ${{ runner.os }}-cargo-${{ github.sha }}
+
+      - name: Collect build metrics
+        run: cargo xtask metrics build
+
+      - name: Cache target
+        uses: actions/cache@v3
+        with:
+          path: target/
+          key: ${{ runner.os }}-target-${{ github.sha }}
+
+      - name: Upload build metrics
+        uses: actions/upload-artifact@v3
+        with:
+          name: build-${{ github.sha }}
+          path: target/build.json
+          if-no-files-found: error
+
+  other_metrics:
+    strategy:
+      matrix:
+        names: [self, ripgrep, webrender, diesel]
+    runs-on: ubuntu-latest
+    needs: [setup_cargo, build_metrics]
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Restore cargo cache
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+          key: ${{ runner.os }}-cargo-${{ github.sha }}
+
+      - name: Restore target cache
+        uses: actions/cache@v3
+        with:
+          path: target/
+          key: ${{ runner.os }}-target-${{ github.sha }}
 
       - name: Collect metrics
-        run: cargo xtask metrics
+        run: cargo xtask metrics ${{ matrix.names }}
+
+      - name: Upload metrics
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.names }}-${{ github.sha }}
+          path: target/${{ matrix.names }}.json
+          if-no-files-found: error
+
+  generate_final_metrics:
+    runs-on: ubuntu-latest
+    needs: [build_metrics, other_metrics]
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Download build metrics
+        uses: actions/download-artifact@v3
+        with:
+          name: build-${{ github.sha }}
+
+      - name: Download self metrics
+        uses: actions/download-artifact@v3
+        with:
+          name: self-${{ github.sha }}
+
+      - name: Download ripgrep metrics
+        uses: actions/download-artifact@v3
+        with:
+          name: ripgrep-${{ github.sha }}
+
+      - name: Download webrender metrics
+        uses: actions/download-artifact@v3
+        with:
+          name: webrender-${{ github.sha }}
+
+      - name: Download diesel metrics
+        uses: actions/download-artifact@v3
+        with:
+          name: diesel-${{ github.sha }}
+
+      - name: Combine json
+        run: |
+          git clone --depth 1 https://$METRICS_TOKEN@github.com/rust-analyzer/metrics.git
+          jq -s ".[0] * .[1] * .[2] * .[3] * .[4]" build.json self.json ripgrep.json webrender.json diesel.json -c >> metrics/metrics.json
+          cd metrics
+          git add .
+          git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈
+          git push origin master
         env:
          METRICS_TOKEN: ${{ secrets.METRICS_TOKEN }}
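Note on the "Combine json" step above: jq's `*` operator merges objects
recursively, so the five per-job JSON files collapse into a single record
before being appended to metrics.json. A rough Rust equivalent of that merge,
as an illustrative sketch only (`merge` is a hypothetical helper, not part of
this repository):

    use serde_json::Value;

    /// Recursively merge `other` into `base`, like jq's `*` operator:
    /// objects are merged key by key, every other value is overwritten.
    fn merge(base: &mut Value, other: Value) {
        match (base, other) {
            (Value::Object(base), Value::Object(other)) => {
                for (k, v) in other {
                    merge(base.entry(k).or_insert(Value::Null), v);
                }
            }
            (slot, other) => *slot = other,
        }
    }

    fn main() {
        let mut acc = serde_json::json!({ "build": { "time": 10 } });
        merge(&mut acc, serde_json::json!({ "self": { "total": 42 } }));
        assert_eq!(
            acc,
            serde_json::json!({ "build": { "time": 10 }, "self": { "total": 42 } })
        );
    }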
474 Cargo.lock generated

File diff suppressed because it is too large.
24 Cargo.toml

@@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-test/imp"]
 resolver = "2"
 
 [workspace.package]
-rust-version = "1.66"
+rust-version = "1.70"
 edition = "2021"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer team"]
@@ -35,6 +35,10 @@ debug = 0
 # chalk-ir = { path = "../chalk/chalk-ir" }
 # chalk-recursive = { path = "../chalk/chalk-recursive" }
 # chalk-derive = { path = "../chalk/chalk-derive" }
+# line-index = { path = "lib/line-index" }
+# la-arena = { path = "lib/la-arena" }
+# lsp-server = { path = "lib/lsp-server" }
 
 # ungrammar = { path = "../ungrammar" }
@@ -57,13 +61,13 @@ ide-diagnostics = { path = "./crates/ide-diagnostics", version = "0.0.0" }
 ide-ssr = { path = "./crates/ide-ssr", version = "0.0.0" }
 intern = { path = "./crates/intern", version = "0.0.0" }
 limit = { path = "./crates/limit", version = "0.0.0" }
+load-cargo = { path = "./crates/load-cargo", version = "0.0.0" }
 mbe = { path = "./crates/mbe", version = "0.0.0" }
 parser = { path = "./crates/parser", version = "0.0.0" }
 paths = { path = "./crates/paths", version = "0.0.0" }
 proc-macro-api = { path = "./crates/proc-macro-api", version = "0.0.0" }
 proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" }
 proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" }
-proc-macro-test = { path = "./crates/proc-macro-test", version = "0.0.0" }
 profile = { path = "./crates/profile", version = "0.0.0" }
 project-model = { path = "./crates/project-model", version = "0.0.0" }
 sourcegen = { path = "./crates/sourcegen", version = "0.0.0" }
@@ -75,7 +79,14 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" }
 tt = { path = "./crates/tt", version = "0.0.0" }
 vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }
-line-index = { version = "0.1.0-pre.1", path = "./lib/line-index" }
+
+# local crates that aren't published to crates.io. These should not have versions.
+proc-macro-test = { path = "./crates/proc-macro-test" }
+
+# In-tree crates that are published separately and follow semver. See lib/README.md
+line-index = { version = "0.1.0-pre.1" }
+la-arena = { version = "0.3.1" }
+lsp-server = { version = "0.7.1" }
 
 # non-local crates
 smallvec = { version = "1.10.0", features = [
@@ -86,9 +97,10 @@ smallvec = { version = "1.10.0", features = [
 smol_str = "0.2.0"
 nohash-hasher = "0.2.0"
 text-size = "1.1.0"
-# the following crates are pinned to prevent us from pulling in syn 2 until all our dependencies have moved
-serde = { version = "=1.0.156", features = ["derive"] }
-serde_json = "1.0.94"
+serde = { version = "1.0.156", features = ["derive"] }
+serde_json = "1.0.96"
 triomphe = { version = "0.1.8", default-features = false, features = ["std"] }
+# can't upgrade due to dashmap depending on 0.12.3 currently
+hashbrown = { version = "0.12.3", features = ["inline-more"], default-features = false }
 
 rustc_lexer = { version = "0.1.0", package = "ra-ap-rustc_lexer" }
@@ -17,7 +17,7 @@ rustc-hash = "1.1.0"
 
 triomphe.workspace = true
 
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
 
 # local deps
 cfg.workspace = true
@@ -26,7 +26,7 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static {
         let fixture = ChangeFixture::parse(ra_fixture);
         let mut db = Self::default();
         fixture.change.apply(&mut db);
-        assert_eq!(fixture.files.len(), 1);
+        assert_eq!(fixture.files.len(), 1, "Multiple file found in the fixture");
         (db, fixture.files[0])
     }
@@ -102,6 +102,8 @@ pub struct ChangeFixture {
     pub change: Change,
 }
 
+const SOURCE_ROOT_PREFIX: &str = "/";
+
 impl ChangeFixture {
     pub fn parse(ra_fixture: &str) -> ChangeFixture {
         Self::parse_with_proc_macros(ra_fixture, Vec::new())
@@ -131,7 +133,6 @@ impl ChangeFixture {
 
         let mut file_set = FileSet::default();
         let mut current_source_root_kind = SourceRootKind::Local;
-        let source_root_prefix = "/".to_string();
         let mut file_id = FileId(0);
         let mut roots = Vec::new();
@@ -151,19 +152,23 @@ impl ChangeFixture {
                 entry.text.clone()
             };
 
-            let meta = FileMeta::from(entry);
-            assert!(meta.path.starts_with(&source_root_prefix));
+            let meta = FileMeta::from_fixture(entry, current_source_root_kind);
+            assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX));
             if !meta.deps.is_empty() {
                 assert!(meta.krate.is_some(), "can't specify deps without naming the crate")
             }
 
-            if let Some(kind) = &meta.introduce_new_source_root {
-                let root = match current_source_root_kind {
+            if let Some(kind) = meta.introduce_new_source_root {
+                assert!(
+                    meta.krate.is_some(),
+                    "new_source_root meta doesn't make sense without crate meta"
+                );
+                let prev_kind = mem::replace(&mut current_source_root_kind, kind);
+                let prev_root = match prev_kind {
                     SourceRootKind::Local => SourceRoot::new_local(mem::take(&mut file_set)),
                     SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)),
                 };
-                roots.push(root);
-                current_source_root_kind = *kind;
+                roots.push(prev_root);
             }
 
             if let Some((krate, origin, version)) = meta.krate {
@@ -185,7 +190,7 @@ impl ChangeFixture {
                     Some(toolchain),
                 );
                 let prev = crates.insert(crate_name.clone(), crate_id);
-                assert!(prev.is_none());
+                assert!(prev.is_none(), "multiple crates with same name: {}", crate_name);
                 for dep in meta.deps {
                     let prelude = meta.extern_prelude.contains(&dep);
                     let dep = CrateName::normalize_dashes(&dep);
@@ -219,7 +224,7 @@ impl ChangeFixture {
                 false,
                 CrateOrigin::Local { repo: None, name: None },
                 default_target_data_layout
-                    .map(|x| x.into())
+                    .map(|it| it.into())
                     .ok_or_else(|| "target_data_layout unset".into()),
                 Some(toolchain),
             );
@@ -442,51 +447,74 @@ struct FileMeta {
     target_data_layout: Option<String>,
 }
 
-fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option<String>) {
-    if let Some((a, b)) = crate_str.split_once('@') {
-        let (version, origin) = match b.split_once(':') {
-            Some(("CratesIo", data)) => match data.split_once(',') {
-                Some((version, url)) => {
-                    (version, CrateOrigin::Local { repo: Some(url.to_owned()), name: None })
-                }
-                _ => panic!("Bad crates.io parameter: {data}"),
-            },
-            _ => panic!("Bad string for crate origin: {b}"),
-        };
-        (a.to_owned(), origin, Some(version.to_string()))
-    } else {
-        let crate_origin = match LangCrateOrigin::from(&*crate_str) {
-            LangCrateOrigin::Other => CrateOrigin::Local { repo: None, name: None },
-            origin => CrateOrigin::Lang(origin),
-        };
-        (crate_str, crate_origin, None)
-    }
-}
-
-impl From<Fixture> for FileMeta {
-    fn from(f: Fixture) -> FileMeta {
-        let mut cfg = CfgOptions::default();
-        f.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into()));
-        f.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into()));
-        let deps = f.deps;
-        FileMeta {
-            path: f.path,
-            krate: f.krate.map(parse_crate),
-            extern_prelude: f.extern_prelude.unwrap_or_else(|| deps.clone()),
-            deps,
-            cfg,
-            edition: f.edition.as_ref().map_or(Edition::CURRENT, |v| Edition::from_str(v).unwrap()),
-            env: f.env.into_iter().collect(),
-            introduce_new_source_root: f.introduce_new_source_root.map(|kind| match &*kind {
-                "local" => SourceRootKind::Local,
-                "library" => SourceRootKind::Library,
-                invalid => panic!("invalid source root kind '{invalid}'"),
-            }),
+impl FileMeta {
+    fn from_fixture(f: Fixture, current_source_root_kind: SourceRootKind) -> Self {
+        let mut cfg = CfgOptions::default();
+        for (k, v) in f.cfgs {
+            if let Some(v) = v {
+                cfg.insert_key_value(k.into(), v.into());
+            } else {
+                cfg.insert_atom(k.into());
+            }
+        }
+
+        let introduce_new_source_root = f.introduce_new_source_root.map(|kind| match &*kind {
+            "local" => SourceRootKind::Local,
+            "library" => SourceRootKind::Library,
+            invalid => panic!("invalid source root kind '{invalid}'"),
+        });
+        let current_source_root_kind =
+            introduce_new_source_root.unwrap_or(current_source_root_kind);
+
+        let deps = f.deps;
+        Self {
+            path: f.path,
+            krate: f.krate.map(|it| parse_crate(it, current_source_root_kind, f.library)),
+            extern_prelude: f.extern_prelude.unwrap_or_else(|| deps.clone()),
+            deps,
+            cfg,
+            edition: f.edition.map_or(Edition::CURRENT, |v| Edition::from_str(&v).unwrap()),
+            env: f.env.into_iter().collect(),
+            introduce_new_source_root,
             target_data_layout: f.target_data_layout,
         }
     }
 }
 
+fn parse_crate(
+    crate_str: String,
+    current_source_root_kind: SourceRootKind,
+    explicit_non_workspace_member: bool,
+) -> (String, CrateOrigin, Option<String>) {
+    // syntax:
+    //   "my_awesome_crate"
+    //   "my_awesome_crate@0.0.1,http://example.com"
+    let (name, repo, version) = if let Some((name, remain)) = crate_str.split_once('@') {
+        let (version, repo) =
+            remain.split_once(',').expect("crate meta: found '@' without version and url");
+        (name.to_owned(), Some(repo.to_owned()), Some(version.to_owned()))
+    } else {
+        (crate_str, None, None)
+    };
+
+    let non_workspace_member = explicit_non_workspace_member
+        || matches!(current_source_root_kind, SourceRootKind::Library);
+
+    let origin = match LangCrateOrigin::from(&*name) {
+        LangCrateOrigin::Other => {
+            let name = name.clone();
+            if non_workspace_member {
+                CrateOrigin::Library { repo, name }
+            } else {
+                CrateOrigin::Local { repo, name: Some(name) }
+            }
+        }
+        origin => CrateOrigin::Lang(origin),
+    };
+
+    (name, origin, version)
+}
+
 // Identity mapping
 #[derive(Debug)]
 struct IdentityProcMacroExpander;
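The new `parse_crate` drops the old `name@CratesIo:version,url` form in favor
of the plain `name@version,repo` convention documented in the `// syntax:`
comment above. A self-contained sketch of just that split, for illustration
(`split_crate_meta` is a made-up name, not an API of the crate):

    fn split_crate_meta(s: &str) -> (&str, Option<&str>, Option<&str>) {
        match s.split_once('@') {
            // "name@version,repo": everything after '@' must contain a comma.
            Some((name, rest)) => {
                let (version, repo) = rest
                    .split_once(',')
                    .expect("crate meta: found '@' without version and url");
                (name, Some(version), Some(repo))
            }
            // Bare "name": no version or repository metadata.
            None => (s, None, None),
        }
    }

    fn main() {
        assert_eq!(
            split_crate_meta("my_awesome_crate@0.0.1,http://example.com"),
            ("my_awesome_crate", Some("0.0.1"), Some("http://example.com"))
        );
        assert_eq!(split_crate_meta("core"), ("core", None, None));
    }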
@@ -138,12 +138,12 @@ impl ops::Deref for CrateName {
     }
 }
 
-/// Origin of the crates. It is used in emitting monikers.
+/// Origin of the crates.
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum CrateOrigin {
-    /// Crates that are from the rustc workspace
+    /// Crates that are from the rustc workspace.
     Rustc { name: String },
-    /// Crates that are workspace members,
+    /// Crates that are workspace members.
     Local { repo: Option<String>, name: Option<String> },
     /// Crates that are non member libraries.
     Library { repo: Option<String>, name: String },
@@ -18,13 +18,13 @@ rustc-hash = "1.1.0"
 tt.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.4.1"
 oorandom = "11.1.3"
 # We depend on both individually instead of using `features = ["derive"]` to microoptimize the
 # build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
 # supports `arbitrary`. This way, we avoid feature unification.
-arbitrary = "1.2.2"
-derive_arbitrary = "1.2.2"
+arbitrary = "1.3.0"
+derive_arbitrary = "1.3.1"
 
 # local deps
 mbe.workspace = true
@@ -69,7 +69,7 @@ impl CfgOptions {
     }
 
     pub fn get_cfg_keys(&self) -> impl Iterator<Item = &SmolStr> {
-        self.enabled.iter().map(|x| match x {
+        self.enabled.iter().map(|it| match it {
             CfgAtom::Flag(key) => key,
             CfgAtom::KeyValue { key, .. } => key,
         })
@@ -79,7 +79,7 @@ impl CfgOptions {
         &'a self,
         cfg_key: &'a str,
     ) -> impl Iterator<Item = &'a SmolStr> + 'a {
-        self.enabled.iter().filter_map(move |x| match x {
+        self.enabled.iter().filter_map(move |it| match it {
             CfgAtom::KeyValue { key, value } if cfg_key == key => Some(value),
             _ => None,
         })
@@ -12,9 +12,9 @@ rust-version.workspace = true
 doctest = false
 
 [dependencies]
-crossbeam-channel = "0.5.5"
+crossbeam-channel = "0.5.8"
 tracing = "0.1.37"
-cargo_metadata = "0.15.0"
+cargo_metadata = "0.15.4"
 rustc-hash = "1.1.0"
 serde_json.workspace = true
 serde.workspace = true
@@ -21,14 +21,14 @@ dashmap = { version = "=5.4.0", features = ["raw-api"] }
 drop_bomb = "0.1.5"
 either = "1.7.0"
 fst = { version = "0.4.7", default-features = false }
-hashbrown = { version = "0.12.1", default-features = false }
-indexmap = "1.9.1"
+indexmap = "2.0.0"
 itertools = "0.10.5"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
 once_cell = "1.17.0"
 rustc-hash = "1.1.0"
-smallvec.workspace = true
 tracing = "0.1.35"
+smallvec.workspace = true
+hashbrown.workspace = true
 triomphe.workspace = true
 
 rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false }
@@ -137,7 +137,10 @@ impl Attrs {
 
         let cfg_options = &crate_graph[krate].cfg_options;
 
-        let Some(variant) = enum_.variants.clone().filter(|variant| {
+        let Some(variant) = enum_
+            .variants
+            .clone()
+            .filter(|variant| {
                 let attrs = item_tree.attrs(db, krate, (*variant).into());
                 attrs.is_cfg_enabled(cfg_options)
             })
@@ -272,6 +275,25 @@ impl Attrs {
         self.by_key("proc_macro_derive").exists()
     }
 
+    pub fn is_test(&self) -> bool {
+        self.iter().any(|it| {
+            it.path()
+                .segments()
+                .iter()
+                .rev()
+                .zip(["core", "prelude", "v1", "test"].iter().rev())
+                .all(|it| it.0.as_str() == Some(it.1))
+        })
+    }
+
+    pub fn is_ignore(&self) -> bool {
+        self.by_key("ignore").exists()
+    }
+
+    pub fn is_bench(&self) -> bool {
+        self.by_key("bench").exists()
+    }
+
     pub fn is_unstable(&self) -> bool {
         self.by_key("unstable").exists()
     }
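The new `Attrs::is_test` matches attribute paths from the right, so both
`#[test]` and the fully qualified `#[core::prelude::v1::test]` are accepted:
`zip` stops at the shorter side, so a bare `test` only has to match the last
expected segment. The same idea over plain string slices, as a stand-alone
illustrative sketch:

    fn is_test_path(segments: &[&str]) -> bool {
        segments
            .iter()
            .rev()
            .zip(["core", "prelude", "v1", "test"].iter().rev())
            .all(|(seg, expected)| seg == expected)
    }

    fn main() {
        assert!(is_test_path(&["test"]));
        assert!(is_test_path(&["core", "prelude", "v1", "test"]));
        assert!(!is_test_path(&["bench"]));
        assert!(!is_test_path(&["custom", "test", "harness"]));
    }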
@@ -282,7 +304,7 @@ use std::slice::Iter as SliceIter;
 pub enum DocAtom {
     /// eg. `#[doc(hidden)]`
     Flag(SmolStr),
-    /// eg. `#[doc(alias = "x")]`
+    /// eg. `#[doc(alias = "it")]`
     ///
     /// Note that a key can have multiple values that are all considered "active" at the same time.
     /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
@@ -462,6 +484,7 @@ impl AttrsWithOwner {
                 }
             },
             AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
+            AttrDefId::ExternCrateId(it) => attrs_from_item_tree_loc(db, it),
         };
 
         let attrs = raw_attrs.filter(db.upcast(), def.krate(db));
@@ -546,6 +569,7 @@ impl AttrsWithOwner {
                 .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())),
             },
             AttrDefId::ExternBlockId(id) => any_has_attrs(db, id),
+            AttrDefId::ExternCrateId(id) => any_has_attrs(db, id),
         };
 
         AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
@@ -273,10 +273,10 @@ impl Body {
 
     pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool {
         match self.binding_owners.get(&binding) {
-            Some(x) => {
+            Some(it) => {
                 // We assign expression ids in a way that outer closures will receive
                 // a lower id
-                x.into_raw() < relative_to.into_raw()
+                it.into_raw() < relative_to.into_raw()
             }
             None => true,
         }
@@ -297,11 +297,11 @@ impl ExprCollector<'_> {
             let (result_expr_id, prev_binding_owner) =
                 this.initialize_binding_owner(syntax_ptr);
             let inner_expr = this.collect_block(e);
-            let x = this.db.intern_anonymous_const(ConstBlockLoc {
+            let it = this.db.intern_anonymous_const(ConstBlockLoc {
                 parent: this.owner,
                 root: inner_expr,
             });
-            this.body.exprs[result_expr_id] = Expr::Const(x);
+            this.body.exprs[result_expr_id] = Expr::Const(it);
             this.current_binding_owner = prev_binding_owner;
             result_expr_id
         })
@@ -324,10 +324,10 @@ impl ExprCollector<'_> {
             ast::Expr::CallExpr(e) => {
                 let is_rustc_box = {
                     let attrs = e.attrs();
-                    attrs.filter_map(|x| x.as_simple_atom()).any(|x| x == "rustc_box")
+                    attrs.filter_map(|it| it.as_simple_atom()).any(|it| it == "rustc_box")
                 };
                 if is_rustc_box {
-                    let expr = self.collect_expr_opt(e.arg_list().and_then(|x| x.args().next()));
+                    let expr = self.collect_expr_opt(e.arg_list().and_then(|it| it.args().next()));
                     self.alloc_expr(Expr::Box { expr }, syntax_ptr)
                 } else {
                     let callee = self.collect_expr_opt(e.expr());
@@ -781,7 +781,7 @@ impl ExprCollector<'_> {
             pat: self.alloc_pat_desugared(some_pat),
             guard: None,
             expr: self.with_opt_labeled_rib(label, |this| {
-                this.collect_expr_opt(e.loop_body().map(|x| x.into()))
+                this.collect_expr_opt(e.loop_body().map(|it| it.into()))
             }),
         };
         let iter_name = Name::generate_new_name();
@@ -874,10 +874,10 @@ impl ExprCollector<'_> {
             }),
             guard: None,
             expr: {
-                let x = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone());
+                let it = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone());
                 let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone());
                 let result = self.alloc_expr(
-                    Expr::Call { callee, args: Box::new([x]), is_assignee_expr: false },
+                    Expr::Call { callee, args: Box::new([it]), is_assignee_expr: false },
                     syntax_ptr.clone(),
                 );
                 self.alloc_expr(
@@ -1240,12 +1240,12 @@ impl ExprCollector<'_> {
                 pats.push(self.collect_pat(first, binding_list));
                 binding_list.reject_new = true;
                 for rest in it {
-                    for (_, x) in binding_list.is_used.iter_mut() {
-                        *x = false;
+                    for (_, it) in binding_list.is_used.iter_mut() {
+                        *it = false;
                     }
                     pats.push(self.collect_pat(rest, binding_list));
-                    for (&id, &x) in binding_list.is_used.iter() {
-                        if !x {
+                    for (&id, &is_used) in binding_list.is_used.iter() {
+                        if !is_used {
                             self.body.bindings[id].problems =
                                 Some(BindingProblems::NotBoundAcrossAll);
                         }
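For context, the loop above enforces that every alternative of an or-pattern
rebinds every binding: `is_used` is reset before each alternative, and any
entry still false afterwards is flagged `NotBoundAcrossAll`. A simplified
model of that bookkeeping with plain strings instead of `BindingId`s
(illustrative only, not the real collector):

    use std::collections::BTreeMap;

    /// Names bound by the first alternative that some later alternative misses.
    fn not_bound_across_all<'a>(alternatives: &[&[&'a str]]) -> Vec<&'a str> {
        let mut is_used: BTreeMap<&str, bool> =
            alternatives[0].iter().map(|&name| (name, true)).collect();
        let mut problems = Vec::new();
        for alt in &alternatives[1..] {
            // Reset, like the collector does per alternative.
            for used in is_used.values_mut() {
                *used = false;
            }
            for &name in *alt {
                if let Some(used) = is_used.get_mut(name) {
                    *used = true;
                }
            }
            for (&name, &used) in is_used.iter() {
                if !used && !problems.contains(&name) {
                    problems.push(name);
                }
            }
        }
        problems
    }

    fn main() {
        // `Some(x) | None` fails to bind `x` in the second alternative.
        assert_eq!(not_bound_across_all(&[&["x"], &[]]), vec!["x"]);
        assert_eq!(not_bound_across_all(&[&["x"], &["x"]]), Vec::<&str>::new());
    }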
@@ -1352,9 +1352,9 @@ impl ExprCollector<'_> {
             // FIXME: implement in a way that also builds source map and calculates assoc resolutions in type inference.
             ast::Pat::RangePat(p) => {
                 let mut range_part_lower = |p: Option<ast::Pat>| {
-                    p.and_then(|x| match &x {
-                        ast::Pat::LiteralPat(x) => {
-                            Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(x)?.0)))
+                    p.and_then(|it| match &it {
+                        ast::Pat::LiteralPat(it) => {
+                            Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(it)?.0)))
                         }
                         ast::Pat::IdentPat(p) => {
                             let name =
@@ -1451,9 +1451,7 @@ impl ExprCollector<'_> {
         &self,
         lifetime: Option<ast::Lifetime>,
     ) -> Result<Option<LabelId>, BodyDiagnostic> {
-        let Some(lifetime) = lifetime else {
-            return Ok(None)
-        };
+        let Some(lifetime) = lifetime else { return Ok(None) };
         let name = Name::new_lifetime(&lifetime);
 
         for (rib_idx, rib) in self.label_ribs.iter().enumerate().rev() {
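The `let`-`else` rewrite in this hunk is purely cosmetic: newer rustfmt
collapses a short diverging `else` onto one line. Semantics are unchanged; the
`else` branch must still diverge. A minimal illustration:

    fn first_char(s: &str) -> Option<char> {
        // The else branch must diverge (return, break, continue, panic, ...).
        let Some(c) = s.chars().next() else { return None };
        Some(c)
    }

    fn main() {
        assert_eq!(first_char("ra"), Some('r'));
        assert_eq!(first_char(""), None);
    }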
@@ -105,7 +105,7 @@ struct Printer<'a> {
     needs_indent: bool,
 }
 
-impl<'a> Write for Printer<'a> {
+impl Write for Printer<'_> {
     fn write_str(&mut self, s: &str) -> fmt::Result {
         for line in s.split_inclusive('\n') {
             if self.needs_indent {
@@ -125,7 +125,7 @@ impl<'a> Write for Printer<'a> {
     }
 }
 
-impl<'a> Printer<'a> {
+impl Printer<'_> {
     fn indented(&mut self, f: impl FnOnce(&mut Self)) {
         self.indent_level += 1;
         wln!(self);
@@ -3,12 +3,12 @@ mod block;
 use base_db::{fixture::WithFixture, SourceDatabase};
 use expect_test::Expect;
 
-use crate::ModuleDefId;
+use crate::{test_db::TestDB, ModuleDefId};
 
 use super::*;
 
 fn lower(ra_fixture: &str) -> Arc<Body> {
-    let db = crate::test_db::TestDB::with_files(ra_fixture);
+    let db = TestDB::with_files(ra_fixture);
 
     let krate = db.crate_graph().iter().next().unwrap();
     let def_map = db.crate_def_map(krate);
@@ -25,15 +25,15 @@ fn lower(ra_fixture: &str) -> Arc<Body> {
     db.body(fn_def.unwrap().into())
 }
 
-fn block_def_map_at(ra_fixture: &str) -> String {
-    let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+fn def_map_at(ra_fixture: &str) -> String {
+    let (db, position) = TestDB::with_position(ra_fixture);
 
     let module = db.module_at_position(position);
     module.def_map(&db).dump(&db)
 }
 
 fn check_block_scopes_at(ra_fixture: &str, expect: Expect) {
-    let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+    let (db, position) = TestDB::with_position(ra_fixture);
 
     let module = db.module_at_position(position);
     let actual = module.def_map(&db).dump_block_scopes(&db);
@@ -41,7 +41,7 @@ fn check_block_scopes_at(ra_fixture: &str, expect: Expect) {
 }
 
 fn check_at(ra_fixture: &str, expect: Expect) {
-    let actual = block_def_map_at(ra_fixture);
+    let actual = def_map_at(ra_fixture);
     expect.assert_eq(&actual);
 }
@@ -133,6 +133,47 @@ struct Struct {}
     );
 }
 
+#[test]
+fn super_imports_2() {
+    check_at(
+        r#"
+fn outer() {
+    mod m {
+        struct ResolveMe {}
+        fn middle() {
+            mod m2 {
+                fn inner() {
+                    use super::ResolveMe;
+                    $0
+                }
+            }
+        }
+    }
+}
+"#,
+        expect![[r#"
+            block scope
+            ResolveMe: t
+
+            block scope
+            m2: t
+
+            block scope::m2
+            inner: v
+
+            block scope
+            m: t
+
+            block scope::m
+            ResolveMe: t
+            middle: v
+
+            crate
+            outer: v
+        "#]],
+    );
+}
+
 #[test]
 fn nested_module_scoping() {
     check_block_scopes_at(
@@ -155,6 +196,42 @@ fn f() {
     );
 }
 
+#[test]
+fn self_imports() {
+    check_at(
+        r#"
+fn f() {
+    mod m {
+        struct ResolveMe {}
+        fn g() {
+            fn h() {
+                use self::ResolveMe;
+                $0
+            }
+        }
+    }
+}
+"#,
+        expect![[r#"
+            block scope
+            ResolveMe: t
+
+            block scope
+            h: v
+
+            block scope
+            m: t
+
+            block scope::m
+            ResolveMe: t
+            g: v
+
+            crate
+            f: v
+        "#]],
+    );
+}
+
 #[test]
 fn legacy_macro_items() {
     // Checks that legacy-scoped `macro_rules!` from parent namespaces are resolved and expanded
@@ -24,11 +24,12 @@ use crate::{
         proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroKind},
         DefMap, MacroSubNs,
     },
+    path::ImportAlias,
     type_ref::{TraitRef, TypeBound, TypeRef},
     visibility::RawVisibility,
-    AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
-    Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, ProcMacroId,
-    StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc,
+    AssocItemId, AstIdWithPath, ConstId, ConstLoc, ExternCrateId, FunctionId, FunctionLoc,
+    HasModule, ImplId, Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId,
+    ProcMacroId, StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -424,6 +425,7 @@ impl MacroRulesData {
         Arc::new(MacroRulesData { name: makro.name.clone(), macro_export })
     }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ProcMacroData {
     pub name: Name,
@@ -460,6 +462,30 @@ impl ProcMacroData {
     }
 }
 
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ExternCrateDeclData {
+    pub name: Name,
+    pub alias: Option<ImportAlias>,
+    pub visibility: RawVisibility,
+}
+
+impl ExternCrateDeclData {
+    pub(crate) fn extern_crate_decl_data_query(
+        db: &dyn DefDatabase,
+        extern_crate: ExternCrateId,
+    ) -> Arc<ExternCrateDeclData> {
+        let loc = extern_crate.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let extern_crate = &item_tree[loc.id.value];
+
+        Arc::new(Self {
+            name: extern_crate.name.clone(),
+            visibility: item_tree[extern_crate.visibility].clone(),
+            alias: extern_crate.alias.clone(),
+        })
+    }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ConstData {
     /// `None` for `const _: () = ();`
|
||||||
if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
|
if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
|
||||||
self.diagnostics.push(DefDiagnostic::unconfigured_code(
|
self.diagnostics.push(DefDiagnostic::unconfigured_code(
|
||||||
self.module_id.local_id,
|
self.module_id.local_id,
|
||||||
InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()),
|
InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).erase()),
|
||||||
attrs.cfg().unwrap(),
|
attrs.cfg().unwrap(),
|
||||||
self.expander.cfg_options().clone(),
|
self.expander.cfg_options().clone(),
|
||||||
));
|
));
|
||||||
|
|
|
@@ -18,7 +18,6 @@ use triomphe::Arc;
 use crate::{
     builtin_type::{BuiltinInt, BuiltinUint},
     db::DefDatabase,
-    expander::CfgExpander,
     item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId},
     lang_item::LangItem,
     lower::LowerCtx,
@@ -29,8 +28,8 @@ use crate::{
     tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree},
     type_ref::TypeRef,
     visibility::RawVisibility,
-    EnumId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, UnionId,
-    VariantId,
+    EnumId, EnumLoc, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId,
+    UnionId, VariantId,
 };
 
 /// Note that we use `StructData` for unions as well!
@@ -76,6 +75,7 @@ pub struct EnumData {
 pub struct EnumVariantData {
     pub name: Name,
     pub variant_data: Arc<VariantData>,
+    pub tree_id: la_arena::Idx<crate::item_tree::Variant>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -147,6 +147,7 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
             }
             "C" => ReprFlags::IS_C,
             "transparent" => ReprFlags::IS_TRANSPARENT,
+            "simd" => ReprFlags::IS_SIMD,
             repr => {
                 if let Some(builtin) = BuiltinInt::from_suffix(repr)
                     .map(Either::Left)
@@ -325,11 +326,12 @@ impl EnumData {
                 variants.alloc(EnumVariantData {
                     name: var.name.clone(),
                     variant_data: Arc::new(var_data),
+                    tree_id,
                 });
             } else {
                 diagnostics.push(DefDiagnostic::unconfigured_code(
                     loc.container.local_id,
-                    InFile::new(loc.id.file_id(), var.ast_id.upcast()),
+                    InFile::new(loc.id.file_id(), var.ast_id.erase()),
                     attrs.cfg().unwrap(),
                     cfg_options.clone(),
                 ))
@@ -367,9 +369,10 @@ impl HasChildSource<LocalEnumVariantId> for EnumId {
         &self,
         db: &dyn DefDatabase,
     ) -> InFile<ArenaMap<LocalEnumVariantId, Self::Value>> {
-        let src = self.lookup(db).source(db);
+        let loc = &self.lookup(db);
+        let src = loc.source(db);
         let mut trace = Trace::new_for_map();
-        lower_enum(db, &mut trace, &src, self.lookup(db).container);
+        lower_enum(db, &mut trace, &src, loc);
         src.with_value(trace.into_map())
     }
 }
@@ -378,31 +381,58 @@ fn lower_enum(
     db: &dyn DefDatabase,
     trace: &mut Trace<EnumVariantData, ast::Variant>,
     ast: &InFile<ast::Enum>,
-    module_id: ModuleId,
+    loc: &EnumLoc,
 ) {
-    let expander = CfgExpander::new(db, ast.file_id, module_id.krate);
+    let item_tree = loc.id.item_tree(db);
+    let krate = loc.container.krate;
+
+    let item_tree_variants = item_tree[loc.id.value].variants.clone();
+
+    let cfg_options = &db.crate_graph()[krate].cfg_options;
     let variants = ast
         .value
         .variant_list()
         .into_iter()
         .flat_map(|it| it.variants())
-        .filter(|var| expander.is_cfg_enabled(db, var));
-    for var in variants {
+        .zip(item_tree_variants)
+        .filter(|&(_, item_tree_id)| {
+            item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options)
+        });
+    for (var, item_tree_id) in variants {
         trace.alloc(
             || var.clone(),
             || EnumVariantData {
                 name: var.name().map_or_else(Name::missing, |it| it.as_name()),
-                variant_data: Arc::new(VariantData::new(db, ast.with_value(var.kind()), module_id)),
+                variant_data: Arc::new(VariantData::new(
+                    db,
+                    ast.with_value(var.kind()),
+                    loc.container,
+                    &item_tree,
+                    item_tree_id,
+                )),
+                tree_id: item_tree_id,
             },
         );
     }
 }
 
 impl VariantData {
-    fn new(db: &dyn DefDatabase, flavor: InFile<ast::StructKind>, module_id: ModuleId) -> Self {
-        let mut expander = CfgExpander::new(db, flavor.file_id, module_id.krate);
+    fn new(
+        db: &dyn DefDatabase,
+        flavor: InFile<ast::StructKind>,
+        module_id: ModuleId,
+        item_tree: &ItemTree,
+        variant: la_arena::Idx<crate::item_tree::Variant>,
+    ) -> Self {
         let mut trace = Trace::new_for_arena();
-        match lower_struct(db, &mut expander, &mut trace, &flavor) {
+        match lower_struct(
+            db,
+            &mut trace,
+            &flavor,
+            module_id.krate,
+            item_tree,
+            &item_tree[variant].fields,
+        ) {
             StructKind::Tuple => VariantData::Tuple(trace.into_arena()),
             StructKind::Record => VariantData::Record(trace.into_arena()),
             StructKind::Unit => VariantData::Unit,
@@ -434,28 +464,43 @@ impl HasChildSource<LocalFieldId> for VariantId {
     type Value = Either<ast::TupleField, ast::RecordField>;
 
     fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<LocalFieldId, Self::Value>> {
-        let (src, module_id) = match self {
+        let item_tree;
+        let (src, fields, container) = match *self {
             VariantId::EnumVariantId(it) => {
                 // I don't really like the fact that we call into parent source
                 // here, this might add to more queries then necessary.
+                let lookup = it.parent.lookup(db);
+                item_tree = lookup.id.item_tree(db);
                 let src = it.parent.child_source(db);
-                (src.map(|map| map[it.local_id].kind()), it.parent.lookup(db).container)
+                let tree_id = db.enum_data(it.parent).variants[it.local_id].tree_id;
+                let fields = &item_tree[tree_id].fields;
+                (src.map(|map| map[it.local_id].kind()), fields, lookup.container)
             }
             VariantId::StructId(it) => {
-                (it.lookup(db).source(db).map(|it| it.kind()), it.lookup(db).container)
+                let lookup = it.lookup(db);
+                item_tree = lookup.id.item_tree(db);
+                (
+                    lookup.source(db).map(|it| it.kind()),
+                    &item_tree[lookup.id.value].fields,
+                    lookup.container,
+                )
             }
-            VariantId::UnionId(it) => (
-                it.lookup(db).source(db).map(|it| {
-                    it.record_field_list()
-                        .map(ast::StructKind::Record)
-                        .unwrap_or(ast::StructKind::Unit)
-                }),
-                it.lookup(db).container,
-            ),
+            VariantId::UnionId(it) => {
+                let lookup = it.lookup(db);
+                item_tree = lookup.id.item_tree(db);
+                (
+                    lookup.source(db).map(|it| {
+                        it.record_field_list()
+                            .map(ast::StructKind::Record)
+                            .unwrap_or(ast::StructKind::Unit)
+                    }),
+                    &item_tree[lookup.id.value].fields,
+                    lookup.container,
+                )
+            }
         };
-        let mut expander = CfgExpander::new(db, src.file_id, module_id.krate);
         let mut trace = Trace::new_for_map();
-        lower_struct(db, &mut expander, &mut trace, &src);
+        lower_struct(db, &mut trace, &src, container.krate, &item_tree, fields);
         src.with_value(trace.into_map())
     }
 }
@@ -469,16 +514,19 @@ pub enum StructKind {
 
 fn lower_struct(
     db: &dyn DefDatabase,
-    expander: &mut CfgExpander,
     trace: &mut Trace<FieldData, Either<ast::TupleField, ast::RecordField>>,
     ast: &InFile<ast::StructKind>,
+    krate: CrateId,
+    item_tree: &ItemTree,
+    fields: &Fields,
 ) -> StructKind {
-    let ctx = LowerCtx::new(db, &expander.hygiene(), ast.file_id);
+    let ctx = LowerCtx::with_file_id(db, ast.file_id);
 
-    match &ast.value {
-        ast::StructKind::Tuple(fl) => {
-            for (i, fd) in fl.fields().enumerate() {
-                if !expander.is_cfg_enabled(db, &fd) {
+    match (&ast.value, fields) {
+        (ast::StructKind::Tuple(fl), Fields::Tuple(fields)) => {
+            let cfg_options = &db.crate_graph()[krate].cfg_options;
+            for ((i, fd), item_tree_id) in fl.fields().enumerate().zip(fields.clone()) {
+                if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) {
                     continue;
                 }
@@ -493,9 +541,10 @@ fn lower_struct(
             }
             StructKind::Tuple
         }
-        ast::StructKind::Record(fl) => {
-            for fd in fl.fields() {
-                if !expander.is_cfg_enabled(db, &fd) {
+        (ast::StructKind::Record(fl), Fields::Record(fields)) => {
+            let cfg_options = &db.crate_graph()[krate].cfg_options;
+            for (fd, item_tree_id) in fl.fields().zip(fields.clone()) {
+                if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) {
                     continue;
                 }
@@ -510,7 +559,7 @@ fn lower_struct(
             }
             StructKind::Record
         }
-        ast::StructKind::Unit => StructKind::Unit,
+        _ => StructKind::Unit,
     }
 }
@@ -539,8 +588,8 @@ fn lower_fields(
                     InFile::new(
                         current_file_id,
                         match field.ast_id {
-                            FieldAstId::Record(it) => it.upcast(),
-                            FieldAstId::Tuple(it) => it.upcast(),
+                            FieldAstId::Record(it) => it.erase(),
+                            FieldAstId::Tuple(it) => it.erase(),
                         },
                     ),
                     attrs.cfg().unwrap(),
@@ -563,8 +612,8 @@ fn lower_fields(
                     InFile::new(
                         current_file_id,
                         match field.ast_id {
-                            FieldAstId::Record(it) => it.upcast(),
-                            FieldAstId::Tuple(it) => it.upcast(),
+                            FieldAstId::Record(it) => it.erase(),
+                            FieldAstId::Tuple(it) => it.erase(),
                         },
                     ),
                     attrs.cfg().unwrap(),
@@ -12,27 +12,31 @@ use crate::{
     body::{scope::ExprScopes, Body, BodySourceMap},
     data::{
         adt::{EnumData, StructData},
-        ConstData, FunctionData, ImplData, Macro2Data, MacroRulesData, ProcMacroData, StaticData,
-        TraitAliasData, TraitData, TypeAliasData,
+        ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData,
+        ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData,
     },
     generics::GenericParams,
     import_map::ImportMap,
     item_tree::{AttrOwner, ItemTree},
-    lang_item::{LangItem, LangItemTarget, LangItems},
+    lang_item::{self, LangItem, LangItemTarget, LangItems},
     nameres::{diagnostics::DefDiagnostic, DefMap},
     visibility::{self, Visibility},
     AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId,
-    EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId,
-    ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId, LocalFieldId, Macro2Id, Macro2Loc,
-    MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId,
-    StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId,
-    UnionLoc, VariantId,
+    EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId,
+    FunctionLoc, GenericDefId, ImplId, ImplLoc, ImportId, ImportLoc, InTypeConstId, InTypeConstLoc,
+    LocalEnumVariantId, LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc,
+    ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId,
+    TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, VariantId,
 };
 
 #[salsa::query_group(InternDatabaseStorage)]
 pub trait InternDatabase: SourceDatabase {
     // region: items
     #[salsa::interned]
+    fn intern_import(&self, loc: ImportLoc) -> ImportId;
+    #[salsa::interned]
+    fn intern_extern_crate(&self, loc: ExternCrateLoc) -> ExternCrateId;
+    #[salsa::interned]
     fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
     #[salsa::interned]
     fn intern_struct(&self, loc: StructLoc) -> StructId;
@@ -160,6 +164,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDatabase> {
     #[salsa::invoke(ProcMacroData::proc_macro_data_query)]
     fn proc_macro_data(&self, makro: ProcMacroId) -> Arc<ProcMacroData>;
 
+    #[salsa::invoke(ExternCrateDeclData::extern_crate_decl_data_query)]
+    fn extern_crate_decl_data(&self, extern_crate: ExternCrateId) -> Arc<ExternCrateDeclData>;
+
     // endregion:data
 
     #[salsa::invoke(Body::body_with_source_map_query)]
@@ -197,6 +204,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDatabase> {
     #[salsa::invoke(AttrsWithOwner::attrs_query)]
     fn attrs(&self, def: AttrDefId) -> Attrs;
fn attrs(&self, def: AttrDefId) -> Attrs;
|
fn attrs(&self, def: AttrDefId) -> Attrs;
|
||||||
|
|
||||||
|
#[salsa::invoke(lang_item::lang_attr_query)]
|
||||||
|
fn lang_attr(&self, def: AttrDefId) -> Option<LangItem>;
|
||||||
|
|
||||||
#[salsa::transparent]
|
#[salsa::transparent]
|
||||||
#[salsa::invoke(AttrsWithOwner::attrs_with_owner)]
|
#[salsa::invoke(AttrsWithOwner::attrs_with_owner)]
|
||||||
fn attrs_with_owner(&self, def: AttrDefId) -> AttrsWithOwner;
|
fn attrs_with_owner(&self, def: AttrDefId) -> AttrsWithOwner;
|
||||||
|
|
|
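The new `intern_import` and `intern_extern_crate` queries follow the usual interning scheme: a location is mapped to a small integer id, and the id can be resolved back to the location. A rough standalone model of such an interner, assuming string locations (this `Interner` type is hypothetical, not salsa's actual implementation):

use std::collections::HashMap;

// A tiny interner in the spirit of `intern_extern_crate` above: each
// distinct location gets a small integer id, and lookups are O(1).
#[derive(Default)]
struct Interner {
    to_id: HashMap<String, u32>,
    to_loc: Vec<String>,
}

impl Interner {
    fn intern(&mut self, loc: &str) -> u32 {
        if let Some(&id) = self.to_id.get(loc) {
            return id;
        }
        let id = self.to_loc.len() as u32;
        self.to_loc.push(loc.to_string());
        self.to_id.insert(loc.to_string(), id);
        id
    }

    fn lookup(&self, id: u32) -> &str {
        &self.to_loc[id as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("crate::foo");
    let b = interner.intern("crate::foo");
    assert_eq!(a, b); // same location, same id
    assert_eq!(interner.lookup(a), "crate::foo");
}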
@@ -8,8 +8,8 @@ use syntax::{ast, AstNode, AstPtr};
 
 use crate::{
     dyn_map::{DynMap, Policy},
-    ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id,
-    MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
+    ConstId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, ImplId, LifetimeParamId,
+    Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
     TypeOrConstParamId, UnionId,
 };
 
@@ -25,6 +25,7 @@ pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new();
 pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
 pub const UNION: Key<ast::Union, UnionId> = Key::new();
 pub const ENUM: Key<ast::Enum, EnumId> = Key::new();
+pub const EXTERN_CRATE: Key<ast::ExternCrate, ExternCrateId> = Key::new();
 
 pub const VARIANT: Key<ast::Variant, EnumVariantId> = Key::new();
 pub const TUPLE_FIELD: Key<ast::TupleField, FieldId> = Key::new();
@@ -15,18 +15,11 @@ use crate::{
     MacroId, ModuleId,
 };
 
-/// A subset of Expander that only deals with cfg attributes. We only need it to
-/// avoid cyclic queries in crate def map during enum processing.
 #[derive(Debug)]
-pub(crate) struct CfgExpander {
+pub struct Expander {
     cfg_options: CfgOptions,
     hygiene: Hygiene,
     krate: CrateId,
-}
-
-#[derive(Debug)]
-pub struct Expander {
-    cfg_expander: CfgExpander,
     pub(crate) current_file_id: HirFileId,
     pub(crate) module: ModuleId,
     /// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
@@ -34,41 +27,23 @@ pub struct Expander {
     recursion_limit: Limit,
 }
 
-impl CfgExpander {
-    pub(crate) fn new(
-        db: &dyn DefDatabase,
-        current_file_id: HirFileId,
-        krate: CrateId,
-    ) -> CfgExpander {
-        let hygiene = Hygiene::new(db.upcast(), current_file_id);
-        let cfg_options = db.crate_graph()[krate].cfg_options.clone();
-        CfgExpander { cfg_options, hygiene, krate }
-    }
-
-    pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
-        Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
-    }
-
-    pub(crate) fn is_cfg_enabled(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> bool {
-        let attrs = self.parse_attrs(db, owner);
-        attrs.is_cfg_enabled(&self.cfg_options)
-    }
-
-    pub(crate) fn hygiene(&self) -> &Hygiene {
-        &self.hygiene
-    }
-}
-
 impl Expander {
     pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
-        let cfg_expander = CfgExpander::new(db, current_file_id, module.krate);
         let recursion_limit = db.recursion_limit(module.krate);
         #[cfg(not(test))]
         let recursion_limit = Limit::new(recursion_limit as usize);
         // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
         #[cfg(test)]
         let recursion_limit = Limit::new(std::cmp::min(32, recursion_limit as usize));
-        Expander { cfg_expander, current_file_id, module, recursion_depth: 0, recursion_limit }
+        Expander {
+            current_file_id,
+            module,
+            recursion_depth: 0,
+            recursion_limit,
+            cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
+            hygiene: Hygiene::new(db.upcast(), current_file_id),
+            krate: module.krate,
+        }
     }
 
     pub fn enter_expand<T: ast::AstNode>(
@@ -120,7 +95,7 @@ impl Expander {
     }
 
     pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
-        self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id);
+        self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
         self.current_file_id = mark.file_id;
         if self.recursion_depth == u32::MAX {
             // Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@@ -135,7 +110,7 @@ impl Expander {
     }
 
     pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
-        LowerCtx::new(db, &self.cfg_expander.hygiene, self.current_file_id)
+        LowerCtx::new(db, &self.hygiene, self.current_file_id)
     }
 
     pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
@@ -143,11 +118,11 @@ impl Expander {
     }
 
     pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
-        self.cfg_expander.parse_attrs(db, owner)
+        Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
     }
 
     pub(crate) fn cfg_options(&self) -> &CfgOptions {
-        &self.cfg_expander.cfg_options
+        &self.cfg_options
     }
 
     pub fn current_file_id(&self) -> HirFileId {
@@ -155,7 +130,7 @@ impl Expander {
     }
 
     pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
-        let ctx = LowerCtx::new(db, &self.cfg_expander.hygiene, self.current_file_id);
+        let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id);
         Path::from_src(path, &ctx)
     }
 
@@ -194,7 +169,7 @@ impl Expander {
         let parse = value.cast::<T>()?;
 
         self.recursion_depth += 1;
-        self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
+        self.hygiene = Hygiene::new(db.upcast(), file_id);
         let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
         let mark =
             Mark { file_id: old_file_id, bomb: DropBomb::new("expansion mark dropped") };
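`Expander` keeps using `u32::MAX` as a sentinel meaning "the recursion limit was hit somewhere below", so unwinding marks never underflows the counter. A simplified sketch of that counter discipline, under the assumption that the sentinel collapses on the first `exit` (the real code resets it only when leaving the whole expansion tree):

// Illustrative model of the depth counter with a MAX sentinel.
struct Depth(u32);

impl Depth {
    fn enter(&mut self, limit: u32) -> Result<(), ()> {
        if self.0 >= limit {
            // Mark the expansion tree as over-limit instead of counting on.
            self.0 = u32::MAX;
            return Err(());
        }
        self.0 += 1;
        Ok(())
    }

    fn exit(&mut self) {
        if self.0 == u32::MAX {
            // Limit was reached somewhere below; collapse the sentinel.
            self.0 = 0;
        } else {
            self.0 -= 1;
        }
    }
}

fn main() {
    let mut d = Depth(0);
    assert!(d.enter(2).is_ok());
    assert!(d.enter(2).is_ok());
    assert!(d.enter(2).is_err()); // third level exceeds the limit of 2
    d.exit();
    assert_eq!(d.0, 0); // sentinel collapsed back to zero on exit
}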
@@ -360,7 +360,7 @@ fn calculate_best_path(
                 prefer_no_std,
             )?;
             cov_mark::hit!(partially_imported);
-            path.push_segment(info.path.segments.last()?.clone());
+            path.push_segment(info.name.clone());
             Some(path)
         })
     });
@@ -67,21 +67,21 @@ pub enum TypeOrConstParamData {
 impl TypeOrConstParamData {
     pub fn name(&self) -> Option<&Name> {
         match self {
-            TypeOrConstParamData::TypeParamData(x) => x.name.as_ref(),
-            TypeOrConstParamData::ConstParamData(x) => Some(&x.name),
+            TypeOrConstParamData::TypeParamData(it) => it.name.as_ref(),
+            TypeOrConstParamData::ConstParamData(it) => Some(&it.name),
         }
     }
 
     pub fn has_default(&self) -> bool {
         match self {
-            TypeOrConstParamData::TypeParamData(x) => x.default.is_some(),
-            TypeOrConstParamData::ConstParamData(x) => x.has_default,
+            TypeOrConstParamData::TypeParamData(it) => it.default.is_some(),
+            TypeOrConstParamData::ConstParamData(it) => it.has_default,
         }
     }
 
     pub fn type_param(&self) -> Option<&TypeParamData> {
         match self {
-            TypeOrConstParamData::TypeParamData(x) => Some(x),
+            TypeOrConstParamData::TypeParamData(it) => Some(it),
             TypeOrConstParamData::ConstParamData(_) => None,
         }
     }
@@ -89,14 +89,14 @@ impl TypeOrConstParamData {
     pub fn const_param(&self) -> Option<&ConstParamData> {
         match self {
             TypeOrConstParamData::TypeParamData(_) => None,
-            TypeOrConstParamData::ConstParamData(x) => Some(x),
+            TypeOrConstParamData::ConstParamData(it) => Some(it),
         }
     }
 
     pub fn is_trait_self(&self) -> bool {
         match self {
-            TypeOrConstParamData::TypeParamData(x) => {
-                x.provenance == TypeParamProvenance::TraitSelf
+            TypeOrConstParamData::TypeParamData(it) => {
+                it.provenance == TypeParamProvenance::TraitSelf
             }
             TypeOrConstParamData::ConstParamData(_) => false,
         }
@@ -425,8 +425,8 @@ impl ConstRef {
         }
         match expr {
             ast::Expr::PathExpr(p) if is_path_ident(&p) => {
-                match p.path().and_then(|x| x.segment()).and_then(|x| x.name_ref()) {
-                    Some(x) => Self::Path(x.as_name()),
+                match p.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) {
+                    Some(it) => Self::Path(it.as_name()),
                     None => Self::Scalar(LiteralConstRef::Unknown),
                 }
             }
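The `import_map.rs` hunks below key the map by a single `Name` instead of a full `ImportPath`, and the FST is rebuilt over name groups: entries are sorted by lowercased name and only the first index of each group is inserted, so a lookup yields the start of a run of identically named items. A rough model of that grouping, with a `BTreeMap` standing in for the real FST (all names here are illustrative):

use std::collections::BTreeMap;

// Given a name-sorted list, record the first index of every name group,
// like the `dedup_by` + `builder.insert(name, start_idx)` pattern below.
fn first_index_per_name(sorted: &[(&str, u32)]) -> BTreeMap<String, usize> {
    let mut index = BTreeMap::new();
    for (i, (name, _)) in sorted.iter().enumerate() {
        index.entry(name.to_lowercase()).or_insert(i);
    }
    index
}

fn main() {
    // Two items share the name "fmt"; lookup yields the group's start, and
    // callers then scan forward while the name still matches.
    let sorted = [("fmt", 1), ("fmt", 2), ("format", 3)];
    let index = first_index_per_name(&sorted);
    assert_eq!(index["fmt"], 0);
    assert_eq!(index["format"], 2);
}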
@@ -1,13 +1,14 @@
 //! A map of all publicly exported items in a crate.
 
+use std::collections::hash_map::Entry;
 use std::{fmt, hash::BuildHasherDefault};
 
 use base_db::CrateId;
 use fst::{self, Streamer};
 use hir_expand::name::Name;
-use indexmap::{map::Entry, IndexMap};
+use indexmap::IndexMap;
 use itertools::Itertools;
-use rustc_hash::{FxHashSet, FxHasher};
+use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
 use triomphe::Arc;
 
 use crate::{
@@ -17,52 +18,23 @@ use crate::{
 
 type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
 
+// FIXME: Support aliases: an item may be exported under multiple names, so `ImportInfo` should
+// have `Vec<(Name, ModuleId)>` instead of `(Name, ModuleId)`.
 /// Item import details stored in the `ImportMap`.
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct ImportInfo {
-    /// A path that can be used to import the item, relative to the crate's root.
-    pub path: ImportPath,
+    /// A name that can be used to import the item, relative to the crate's root.
+    pub name: Name,
     /// The module containing this item.
     pub container: ModuleId,
     /// Whether the import is a trait associated item or not.
     pub is_trait_assoc_item: bool,
 }
 
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct ImportPath {
-    pub segments: Vec<Name>,
-}
-
-impl ImportPath {
-    pub fn display<'a>(&'a self, db: &'a dyn DefDatabase) -> impl fmt::Display + 'a {
-        struct Display<'a> {
-            db: &'a dyn DefDatabase,
-            path: &'a ImportPath,
-        }
-        impl fmt::Display for Display<'_> {
-            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-                fmt::Display::fmt(
-                    &self.path.segments.iter().map(|it| it.display(self.db.upcast())).format("::"),
-                    f,
-                )
-            }
-        }
-        Display { db, path: self }
-    }
-
-    fn len(&self) -> usize {
-        self.segments.len()
-    }
-}
-
-/// A map from publicly exported items to the path needed to import/name them from a downstream
-/// crate.
+/// A map from publicly exported items to its name.
 ///
 /// Reexports of items are taken into account, ie. if something is exported under multiple
 /// names, the one with the shortest import path will be used.
-///
-/// Note that all paths are relative to the containing crate's root, so the crate name still needs
-/// to be prepended to the `ModPath` before the path is valid.
 #[derive(Default)]
 pub struct ImportMap {
     map: FxIndexMap<ItemInNs, ImportInfo>,
@@ -70,84 +42,124 @@ pub struct ImportMap {
     /// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the
     /// values returned by running `fst`.
     ///
-    /// Since a path can refer to multiple items due to namespacing, we store all items with the
-    /// same path right after each other. This allows us to find all items after the FST gives us
+    /// Since a name can refer to multiple items due to namespacing, we store all items with the
+    /// same name right after each other. This allows us to find all items after the FST gives us
     /// the index of the first one.
     importables: Vec<ItemInNs>,
     fst: fst::Map<Vec<u8>>,
 }
 
 impl ImportMap {
-    pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
+    pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
         let _p = profile::span("import_map_query");
 
-        let mut import_map = collect_import_map(db, krate);
+        let map = collect_import_map(db, krate);
 
-        let mut importables = import_map
-            .map
+        let mut importables: Vec<_> = map
             .iter()
-            .map(|(item, info)| (item, fst_path(db, &info.path)))
-            .collect::<Vec<_>>();
-        importables.sort_by(|(_, fst_path), (_, fst_path2)| fst_path.cmp(fst_path2));
+            // We've only collected items, whose name cannot be tuple field.
+            .map(|(&item, info)| (item, info.name.as_str().unwrap().to_ascii_lowercase()))
+            .collect();
+        importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name));
 
         // Build the FST, taking care not to insert duplicate values.
-
         let mut builder = fst::MapBuilder::memory();
-        let mut last_batch_start = 0;
-
-        for idx in 0..importables.len() {
-            let key = &importables[last_batch_start].1;
-            if let Some((_, fst_path)) = importables.get(idx + 1) {
-                if key == fst_path {
-                    continue;
-                }
-            }
-
-            let _ = builder.insert(key, last_batch_start as u64);
-
-            last_batch_start = idx + 1;
+        let iter = importables.iter().enumerate().dedup_by(|lhs, rhs| lhs.1 .1 == rhs.1 .1);
+        for (start_idx, (_, name)) in iter {
+            let _ = builder.insert(name, start_idx as u64);
         }
 
-        import_map.fst = builder.into_map();
-        import_map.importables = importables.iter().map(|&(&item, _)| item).collect();
-
-        Arc::new(import_map)
-    }
-
-    /// Returns the `ModPath` needed to import/mention `item`, relative to this crate's root.
-    pub fn path_of(&self, item: ItemInNs) -> Option<&ImportPath> {
-        self.import_info_for(item).map(|it| &it.path)
+        Arc::new(ImportMap {
+            map,
+            fst: builder.into_map(),
+            importables: importables.into_iter().map(|(item, _)| item).collect(),
+        })
     }
 
     pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> {
         self.map.get(&item)
     }
+}
 
-    #[cfg(test)]
-    fn fmt_for_test(&self, db: &dyn DefDatabase) -> String {
-        let mut importable_paths: Vec<_> = self
-            .map
-            .iter()
-            .map(|(item, info)| {
-                let ns = match item {
-                    ItemInNs::Types(_) => "t",
-                    ItemInNs::Values(_) => "v",
-                    ItemInNs::Macros(_) => "m",
-                };
-                format!("- {} ({ns})", info.path.display(db))
-            })
-            .collect();
-
-        importable_paths.sort();
-        importable_paths.join("\n")
-    }
+fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemInNs, ImportInfo> {
+    let _p = profile::span("collect_import_map");
+
+    let def_map = db.crate_def_map(krate);
+    let mut map = FxIndexMap::default();
+
+    // We look only into modules that are public(ly reexported), starting with the crate root.
+    let root = def_map.module_id(DefMap::ROOT);
+    let mut worklist = vec![(root, 0)];
+    // Records items' minimum module depth.
+    let mut depth_map = FxHashMap::default();
+
+    while let Some((module, depth)) = worklist.pop() {
+        let ext_def_map;
+        let mod_data = if module.krate == krate {
+            &def_map[module.local_id]
+        } else {
+            // The crate might reexport a module defined in another crate.
+            ext_def_map = module.def_map(db);
+            &ext_def_map[module.local_id]
+        };
+
+        let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| {
+            let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public);
+            if per_ns.is_none() { None } else { Some((name, per_ns)) }
+        });
+
+        for (name, per_ns) in visible_items {
+            for item in per_ns.iter_items() {
+                let import_info = ImportInfo {
+                    name: name.clone(),
+                    container: module,
+                    is_trait_assoc_item: false,
+                };
+
+                match depth_map.entry(item) {
+                    Entry::Vacant(entry) => {
+                        entry.insert(depth);
+                    }
+                    Entry::Occupied(mut entry) => {
+                        if depth < *entry.get() {
+                            entry.insert(depth);
+                        } else {
+                            continue;
+                        }
+                    }
+                }
+
+                if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
+                    collect_trait_assoc_items(
+                        db,
+                        &mut map,
+                        tr,
+                        matches!(item, ItemInNs::Types(_)),
+                        &import_info,
+                    );
+                }
+
+                map.insert(item, import_info);
+
+                // If we've just added a module, descend into it. We might traverse modules
+                // multiple times, but only if the module depth is smaller (else we `continue`
+                // above).
+                if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
+                    worklist.push((mod_id, depth + 1));
+                }
+            }
+        }
+    }
+
+    map
 }
 
 fn collect_trait_assoc_items(
-    &mut self,
     db: &dyn DefDatabase,
+    map: &mut FxIndexMap<ItemInNs, ImportInfo>,
     tr: TraitId,
     is_type_in_ns: bool,
-    original_import_info: &ImportInfo,
+    trait_import_info: &ImportInfo,
 ) {
     let _p = profile::span("collect_trait_assoc_items");
     for (assoc_item_name, item) in &db.trait_data(tr).items {
@@ -167,86 +179,13 @@ impl ImportMap {
             ItemInNs::Values(module_def_id)
         };
 
-        let mut assoc_item_info = original_import_info.clone();
-        assoc_item_info.path.segments.push(assoc_item_name.to_owned());
-        assoc_item_info.is_trait_assoc_item = true;
-        self.map.insert(assoc_item, assoc_item_info);
-    }
-}
-
-fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMap {
-    let _p = profile::span("collect_import_map");
-
-    let def_map = db.crate_def_map(krate);
-    let mut import_map = ImportMap::default();
-
-    // We look only into modules that are public(ly reexported), starting with the crate root.
-    let empty = ImportPath { segments: vec![] };
-    let root = def_map.module_id(DefMap::ROOT);
-    let mut worklist = vec![(root, empty)];
-    while let Some((module, mod_path)) = worklist.pop() {
-        let ext_def_map;
-        let mod_data = if module.krate == krate {
-            &def_map[module.local_id]
-        } else {
-            // The crate might reexport a module defined in another crate.
-            ext_def_map = module.def_map(db);
-            &ext_def_map[module.local_id]
+        let assoc_item_info = ImportInfo {
+            container: trait_import_info.container,
+            name: assoc_item_name.clone(),
+            is_trait_assoc_item: true,
         };
 
-        let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| {
-            let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public);
-            if per_ns.is_none() { None } else { Some((name, per_ns)) }
-        });
-
-        for (name, per_ns) in visible_items {
-            let mk_path = || {
-                let mut path = mod_path.clone();
-                path.segments.push(name.clone());
-                path
-            };
-
-            for item in per_ns.iter_items() {
-                let path = mk_path();
-                let path_len = path.len();
-                let import_info =
-                    ImportInfo { path, container: module, is_trait_assoc_item: false };
-
-                if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
-                    import_map.collect_trait_assoc_items(
-                        db,
-                        tr,
-                        matches!(item, ItemInNs::Types(_)),
-                        &import_info,
-                    );
-                }
-
-                match import_map.map.entry(item) {
-                    Entry::Vacant(entry) => {
-                        entry.insert(import_info);
-                    }
-                    Entry::Occupied(mut entry) => {
-                        // If the new path is shorter, prefer that one.
-                        if path_len < entry.get().path.len() {
-                            *entry.get_mut() = import_info;
-                        } else {
-                            continue;
-                        }
-                    }
-                }
-
-                // If we've just added a path to a module, descend into it. We might traverse
-                // modules multiple times, but only if the new path to it is shorter than the
-                // first (else we `continue` above).
-                if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
-                    worklist.push((mod_id, mk_path()));
-                }
-            }
-        }
-    }
-
-    import_map
+        map.insert(assoc_item, assoc_item_info);
+    }
 }
 
 impl PartialEq for ImportMap {
@@ -260,7 +199,7 @@ impl Eq for ImportMap {}
 
 impl fmt::Debug for ImportMap {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let mut importable_paths: Vec<_> = self
+        let mut importable_names: Vec<_> = self
             .map
             .iter()
             .map(|(item, _)| match item {
@@ -270,56 +209,40 @@ impl fmt::Debug for ImportMap {
             })
             .collect();
 
-        importable_paths.sort();
-        f.write_str(&importable_paths.join("\n"))
+        importable_names.sort();
+        f.write_str(&importable_names.join("\n"))
     }
 }
 
-fn fst_path(db: &dyn DefDatabase, path: &ImportPath) -> String {
-    let _p = profile::span("fst_path");
-    let mut s = path.display(db).to_string();
-    s.make_ascii_lowercase();
-    s
-}
-
-#[derive(Debug, Eq, PartialEq, Hash)]
-pub enum ImportKind {
-    Module,
-    Function,
-    Adt,
-    EnumVariant,
-    Const,
-    Static,
-    Trait,
-    TraitAlias,
-    TypeAlias,
-    BuiltinType,
-    AssociatedItem,
-    Macro,
-}
-
 /// A way to match import map contents against the search query.
 #[derive(Debug)]
-pub enum SearchMode {
+enum SearchMode {
     /// Import map entry should strictly match the query string.
-    Equals,
-    /// Import map entry should contain the query string.
-    Contains,
+    Exact,
     /// Import map entry should contain all letters from the query string,
     /// in the same order, but not necessary adjacent.
     Fuzzy,
 }
 
+/// Three possible ways to search for the name in associated and/or other items.
+#[derive(Debug, Clone, Copy)]
+pub enum AssocSearchMode {
+    /// Search for the name in both associated and other items.
+    Include,
+    /// Search for the name in other items only.
+    Exclude,
+    /// Search for the name in the associated items only.
+    AssocItemsOnly,
+}
+
 #[derive(Debug)]
 pub struct Query {
     query: String,
     lowercased: String,
-    name_only: bool,
-    assoc_items_only: bool,
     search_mode: SearchMode,
+    assoc_mode: AssocSearchMode,
     case_sensitive: bool,
     limit: usize,
-    exclude_import_kinds: FxHashSet<ImportKind>,
 }
 
 impl Query {
@@ -328,30 +251,21 @@ impl Query {
         Self {
             query,
             lowercased,
-            name_only: false,
-            assoc_items_only: false,
-            search_mode: SearchMode::Contains,
+            search_mode: SearchMode::Exact,
+            assoc_mode: AssocSearchMode::Include,
             case_sensitive: false,
-            limit: usize::max_value(),
-            exclude_import_kinds: FxHashSet::default(),
+            limit: usize::MAX,
         }
     }
 
-    /// Matches entries' names only, ignoring the rest of
-    /// the qualifier.
-    /// Example: for `std::marker::PhantomData`, the name is `PhantomData`.
-    pub fn name_only(self) -> Self {
-        Self { name_only: true, ..self }
+    /// Fuzzy finds items instead of exact matching.
+    pub fn fuzzy(self) -> Self {
+        Self { search_mode: SearchMode::Fuzzy, ..self }
     }
 
-    /// Matches only the entries that are associated items, ignoring the rest.
-    pub fn assoc_items_only(self) -> Self {
-        Self { assoc_items_only: true, ..self }
-    }
-
-    /// Specifies the way to search for the entries using the query.
-    pub fn search_mode(self, search_mode: SearchMode) -> Self {
-        Self { search_mode, ..self }
+    /// Specifies whether we want to include associated items in the result.
+    pub fn assoc_search_mode(self, assoc_mode: AssocSearchMode) -> Self {
+        Self { assoc_mode, ..self }
     }
 
     /// Limits the returned number of items to `limit`.
@@ -364,12 +278,6 @@ impl Query {
         Self { case_sensitive: true, ..self }
     }
 
-    /// Do not include imports of the specified kind in the search results.
-    pub fn exclude_import_kind(mut self, import_kind: ImportKind) -> Self {
-        self.exclude_import_kinds.insert(import_kind);
-        self
-    }
-
     fn import_matches(
         &self,
         db: &dyn DefDatabase,
@@ -377,49 +285,36 @@ impl Query {
         enforce_lowercase: bool,
     ) -> bool {
         let _p = profile::span("import_map::Query::import_matches");
-        if import.is_trait_assoc_item {
-            if self.exclude_import_kinds.contains(&ImportKind::AssociatedItem) {
-                return false;
-            }
-        } else if self.assoc_items_only {
-            return false;
+        match (import.is_trait_assoc_item, self.assoc_mode) {
+            (true, AssocSearchMode::Exclude) => return false,
+            (false, AssocSearchMode::AssocItemsOnly) => return false,
+            _ => {}
         }
 
-        let mut input = if import.is_trait_assoc_item || self.name_only {
-            import.path.segments.last().unwrap().display(db.upcast()).to_string()
-        } else {
-            import.path.display(db).to_string()
-        };
-        if enforce_lowercase || !self.case_sensitive {
+        let mut input = import.name.display(db.upcast()).to_string();
+        let case_insensitive = enforce_lowercase || !self.case_sensitive;
+        if case_insensitive {
             input.make_ascii_lowercase();
         }
 
-        let query_string =
-            if !enforce_lowercase && self.case_sensitive { &self.query } else { &self.lowercased };
+        let query_string = if case_insensitive { &self.lowercased } else { &self.query };
 
         match self.search_mode {
-            SearchMode::Equals => &input == query_string,
-            SearchMode::Contains => input.contains(query_string),
+            SearchMode::Exact => &input == query_string,
             SearchMode::Fuzzy => {
-                let mut unchecked_query_chars = query_string.chars();
-                let mut mismatching_query_char = unchecked_query_chars.next();
-
-                for input_char in input.chars() {
-                    match mismatching_query_char {
-                        None => return true,
-                        Some(matching_query_char) if matching_query_char == input_char => {
-                            mismatching_query_char = unchecked_query_chars.next();
-                        }
-                        _ => (),
-                    }
+                let mut input_chars = input.chars();
+                for query_char in query_string.chars() {
+                    if input_chars.find(|&it| it == query_char).is_none() {
+                        return false;
+                    }
                 }
-                mismatching_query_char.is_none()
+                true
             }
         }
     }
 }
 
-/// Searches dependencies of `krate` for an importable path matching `query`.
+/// Searches dependencies of `krate` for an importable name matching `query`.
 ///
 /// This returns a list of items that could be imported from dependencies of `krate`.
 pub fn search_dependencies(
@@ -442,13 +337,9 @@ pub fn search_dependencies(
 
     let mut stream = op.union();
 
-    let mut all_indexed_values = FxHashSet::default();
-    while let Some((_, indexed_values)) = stream.next() {
-        all_indexed_values.extend(indexed_values.iter().copied());
-    }
-
     let mut res = FxHashSet::default();
-    for indexed_value in all_indexed_values {
+    while let Some((_, indexed_values)) = stream.next() {
+        for indexed_value in indexed_values {
             let import_map = &import_maps[indexed_value.index];
             let importables = &import_map.importables[indexed_value.value as usize..];
 
@@ -457,22 +348,20 @@ pub fn search_dependencies(
                 continue;
             }
 
-            // Path shared by the importable items in this group.
-            let common_importables_path_fst = fst_path(db, &common_importable_data.path);
-            // Add the items from this `ModPath` group. Those are all subsequent items in
-            // `importables` whose paths match `path`.
+            // Name shared by the importable items in this group.
+            let common_importable_name =
+                common_importable_data.name.to_smol_str().to_ascii_lowercase();
+            // Add the items from this name group. Those are all subsequent items in
+            // `importables` whose name match `common_importable_name`.
             let iter = importables
                 .iter()
                 .copied()
                 .take_while(|item| {
-                    common_importables_path_fst == fst_path(db, &import_map.map[item].path)
-                })
-                .filter(|&item| match item_import_kind(item) {
-                    Some(import_kind) => !query.exclude_import_kinds.contains(&import_kind),
-                    None => true,
+                    common_importable_name
+                        == import_map.map[item].name.to_smol_str().to_ascii_lowercase()
                 })
                 .filter(|item| {
-                    !query.case_sensitive // we've already checked the common importables path case-insensitively
+                    !query.case_sensitive // we've already checked the common importables name case-insensitively
                         || query.import_matches(db, &import_map.map[item], false)
                 });
             res.extend(iter);
@@ -481,24 +370,9 @@ pub fn search_dependencies(
                 return res;
             }
         }
-
-        res
     }
 
-fn item_import_kind(item: ItemInNs) -> Option<ImportKind> {
-    Some(match item.as_module_def_id()? {
-        ModuleDefId::ModuleId(_) => ImportKind::Module,
-        ModuleDefId::FunctionId(_) => ImportKind::Function,
-        ModuleDefId::AdtId(_) => ImportKind::Adt,
-        ModuleDefId::EnumVariantId(_) => ImportKind::EnumVariant,
-        ModuleDefId::ConstId(_) => ImportKind::Const,
-        ModuleDefId::StaticId(_) => ImportKind::Static,
-        ModuleDefId::TraitId(_) => ImportKind::Trait,
-        ModuleDefId::TraitAliasId(_) => ImportKind::TraitAlias,
-        ModuleDefId::TypeAliasId(_) => ImportKind::TypeAlias,
-        ModuleDefId::BuiltinType(_) => ImportKind::BuiltinType,
-        ModuleDefId::MacroId(_) => ImportKind::Macro,
-    })
+    res
 }
 
 #[cfg(test)]
@@ -510,16 +384,39 @@ mod tests {
 
     use super::*;
 
+    impl ImportMap {
+        fn fmt_for_test(&self, db: &dyn DefDatabase) -> String {
+            let mut importable_paths: Vec<_> = self
+                .map
+                .iter()
+                .map(|(item, info)| {
+                    let path = render_path(db, info);
+                    let ns = match item {
+                        ItemInNs::Types(_) => "t",
+                        ItemInNs::Values(_) => "v",
+                        ItemInNs::Macros(_) => "m",
+                    };
+                    format!("- {path} ({ns})")
+                })
+                .collect();
+
+            importable_paths.sort();
+            importable_paths.join("\n")
+        }
+    }
+
     fn check_search(ra_fixture: &str, crate_name: &str, query: Query, expect: Expect) {
         let db = TestDB::with_files(ra_fixture);
         let crate_graph = db.crate_graph();
         let krate = crate_graph
             .iter()
-            .find(|krate| {
-                crate_graph[*krate].display_name.as_ref().map(|n| n.to_string())
-                    == Some(crate_name.to_string())
+            .find(|&krate| {
+                crate_graph[krate]
+                    .display_name
+                    .as_ref()
+                    .is_some_and(|it| &**it.crate_name() == crate_name)
             })
-            .unwrap();
+            .expect("could not find crate");
 
         let actual = search_dependencies(db.upcast(), krate, query)
             .into_iter()
@@ -530,7 +427,7 @@ mod tests {
         let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) {
             Some(assoc_item_path) => (assoc_item_path, "a"),
             None => (
-                dependency_imports.path_of(dependency)?.display(&db).to_string(),
+                render_path(&db, dependency_imports.import_info_for(dependency)?),
                 match dependency {
                     ItemInNs::Types(ModuleDefId::FunctionId(_))
                     | ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f",
@@ -560,57 +457,25 @@ mod tests {
         dependency_imports: &ImportMap,
         dependency: ItemInNs,
     ) -> Option<String> {
-        let dependency_assoc_item_id = match dependency {
-            ItemInNs::Types(ModuleDefId::FunctionId(id))
-            | ItemInNs::Values(ModuleDefId::FunctionId(id)) => AssocItemId::from(id),
-            ItemInNs::Types(ModuleDefId::ConstId(id))
-            | ItemInNs::Values(ModuleDefId::ConstId(id)) => AssocItemId::from(id),
-            ItemInNs::Types(ModuleDefId::TypeAliasId(id))
-            | ItemInNs::Values(ModuleDefId::TypeAliasId(id)) => AssocItemId::from(id),
+        let (dependency_assoc_item_id, container) = match dependency.as_module_def_id()? {
+            ModuleDefId::FunctionId(id) => (AssocItemId::from(id), id.lookup(db).container),
+            ModuleDefId::ConstId(id) => (AssocItemId::from(id), id.lookup(db).container),
+            ModuleDefId::TypeAliasId(id) => (AssocItemId::from(id), id.lookup(db).container),
             _ => return None,
         };
 
-        let trait_ = assoc_to_trait(db, dependency)?;
-        if let ModuleDefId::TraitId(tr) = trait_.as_module_def_id()? {
-            let trait_data = db.trait_data(tr);
-            let assoc_item_name =
-                trait_data.items.iter().find_map(|(assoc_item_name, assoc_item_id)| {
-                    if &dependency_assoc_item_id == assoc_item_id {
-                        Some(assoc_item_name)
-                    } else {
-                        None
-                    }
-                })?;
-            return Some(format!(
-                "{}::{}",
-                dependency_imports.path_of(trait_)?.display(db),
-                assoc_item_name.display(db.upcast())
-            ));
-        }
-        None
-    }
-
-    fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> Option<ItemInNs> {
-        let assoc: AssocItemId = match item {
-            ItemInNs::Types(it) | ItemInNs::Values(it) => match it {
-                ModuleDefId::TypeAliasId(it) => it.into(),
-                ModuleDefId::FunctionId(it) => it.into(),
-                ModuleDefId::ConstId(it) => it.into(),
-                _ => return None,
-            },
-            _ => return None,
+        let ItemContainerId::TraitId(trait_id) = container else {
+            return None;
         };
 
-        let container = match assoc {
-            AssocItemId::FunctionId(it) => it.lookup(db).container,
-            AssocItemId::ConstId(it) => it.lookup(db).container,
-            AssocItemId::TypeAliasId(it) => it.lookup(db).container,
-        };
-
-        match container {
-            ItemContainerId::TraitId(it) => Some(ItemInNs::Types(it.into())),
-            _ => None,
-        }
+        let trait_info = dependency_imports.import_info_for(ItemInNs::Types(trait_id.into()))?;
+        let trait_data = db.trait_data(trait_id);
+        let (assoc_item_name, _) = trait_data
+            .items
+            .iter()
+            .find(|(_, assoc_item_id)| &dependency_assoc_item_id == assoc_item_id)?;
+        Some(format!("{}::{}", render_path(db, trait_info), assoc_item_name.display(db.upcast())))
     }
 
     fn check(ra_fixture: &str, expect: Expect) {
@@ -633,6 +498,24 @@ mod tests {
         expect.assert_eq(&actual)
     }
 
+    fn render_path(db: &dyn DefDatabase, info: &ImportInfo) -> String {
+        let mut module = info.container;
+        let mut segments = vec![&info.name];
+
+        let def_map = module.def_map(db);
+        assert!(def_map.block_id().is_none(), "block local items should not be in `ImportMap`");
+
+        while let Some(parent) = module.containing_module(db) {
+            let parent_data = &def_map[parent.local_id];
+            let (name, _) =
+                parent_data.children.iter().find(|(_, id)| **id == module.local_id).unwrap();
+            segments.push(name);
+            module = parent;
+        }
+
+        segments.iter().rev().map(|it| it.display(db.upcast())).join("::")
+    }
+
     #[test]
     fn smoke() {
         check(
@@ -749,6 +632,7 @@ mod tests {
     #[test]
     fn module_reexport() {
         // Reexporting modules from a dependency adds all contents to the import map.
+        // XXX: The rendered paths are relative to the defining crate.
         check(
             r"
 //- /main.rs crate:main deps:lib
@@ -764,9 +648,9 @@ mod tests {
             - module::S (t)
             - module::S (v)
             main:
+            - module::S (t)
+            - module::S (v)
             - reexported_module (t)
-            - reexported_module::S (t)
-            - reexported_module::S (v)
         "#]],
         );
     }
@@ -868,10 +752,9 @@ mod tests {
         check_search(
             ra_fixture,
             "main",
-            Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
+            Query::new("fmt".to_string()).fuzzy(),
             expect![[r#"
                 dep::fmt (t)
-                dep::fmt::Display (t)
                 dep::fmt::Display::FMT_CONST (a)
                 dep::fmt::Display::format_function (a)
                 dep::fmt::Display::format_method (a)
@@ -898,7 +781,9 @@ mod tests {
         check_search(
             ra_fixture,
             "main",
-            Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy).assoc_items_only(),
+            Query::new("fmt".to_string())
+                .fuzzy()
+                .assoc_search_mode(AssocSearchMode::AssocItemsOnly),
             expect![[r#"
                 dep::fmt::Display::FMT_CONST (a)
                 dep::fmt::Display::format_function (a)
@@ -909,24 +794,11 @@ mod tests {
         check_search(
             ra_fixture,
             "main",
-            Query::new("fmt".to_string())
-                .search_mode(SearchMode::Fuzzy)
-                .exclude_import_kind(ImportKind::AssociatedItem),
+            Query::new("fmt".to_string()).fuzzy().assoc_search_mode(AssocSearchMode::Exclude),
             expect![[r#"
                 dep::fmt (t)
-                dep::fmt::Display (t)
             "#]],
         );
-
-        check_search(
-            ra_fixture,
-            "main",
-            Query::new("fmt".to_string())
-                .search_mode(SearchMode::Fuzzy)
-                .assoc_items_only()
-                .exclude_import_kind(ImportKind::AssociatedItem),
-            expect![[r#""#]],
-        );
     }
 
     #[test]
@@ -958,13 +830,12 @@ mod tests {
         check_search(
             ra_fixture,
             "main",
-            Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
+            Query::new("fmt".to_string()).fuzzy(),
             expect![[r#"
                 dep::Fmt (m)
                 dep::Fmt (t)
                 dep::Fmt (v)
                 dep::fmt (t)
-                dep::fmt::Display (t)
                 dep::fmt::Display::fmt (a)
                 dep::format (f)
             "#]],
@@ -973,7 +844,7 @@ mod tests {
         check_search(
             ra_fixture,
             "main",
-            Query::new("fmt".to_string()).search_mode(SearchMode::Equals),
+            Query::new("fmt".to_string()),
             expect![[r#"
                 dep::Fmt (m)
                 dep::Fmt (t)
@@ -982,20 +853,6 @@ mod tests {
                 dep::fmt::Display::fmt (a)
             "#]],
         );
-
-        check_search(
-            ra_fixture,
-            "main",
-            Query::new("fmt".to_string()).search_mode(SearchMode::Contains),
-            expect![[r#"
-                dep::Fmt (m)
-                dep::Fmt (t)
-                dep::Fmt (v)
-                dep::fmt (t)
-                dep::fmt::Display (t)
-                dep::fmt::Display::fmt (a)
-            "#]],
-        );
     }
 
     #[test]
@@ -1033,7 +890,6 @@ mod tests {
                 dep::Fmt (t)
                 dep::Fmt (v)
                 dep::fmt (t)
-                dep::fmt::Display (t)
                 dep::fmt::Display::fmt (a)
             "#]],
         );
@@ -1041,7 +897,7 @@ mod tests {
         check_search(
             ra_fixture,
             "main",
-            Query::new("fmt".to_string()).name_only(),
+            Query::new("fmt".to_string()),
             expect![[r#"
                 dep::Fmt (m)
                 dep::Fmt (t)
@@ -1106,43 +962,10 @@ mod tests {
 pub fn no() {}
 "#,
             "main",
-            Query::new("".to_string()).limit(2),
+            Query::new("".to_string()).fuzzy().limit(1),
             expect![[r#"
-                dep::Fmt (m)
-                dep::Fmt (t)
-                dep::Fmt (v)
-                dep::fmt (t)
+                dep::fmt::Display (t)
             "#]],
         );
     }
-
-    #[test]
-    fn search_exclusions() {
-        let ra_fixture = r#"
-            //- /main.rs crate:main deps:dep
-            //- /dep.rs crate:dep
-
-            pub struct fmt;
-            pub struct FMT;
-        "#;
-
-        check_search(
-            ra_fixture,
-            "main",
-            Query::new("FMT".to_string()),
-            expect![[r#"
-                dep::FMT (t)
-                dep::FMT (v)
-                dep::fmt (t)
-                dep::fmt (v)
-            "#]],
-        );
-
-        check_search(
-            ra_fixture,
-            "main",
-            Query::new("FMT".to_string()).exclude_import_kind(ImportKind::Adt),
-            expect![[r#""#]],
-        );
-    }
 }
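The rewritten `SearchMode::Fuzzy` arm above is a plain subsequence test: each query character must occur in the input in order, consuming the input iterator as it goes. Extracted as a standalone function under that reading:

// Subsequence match: each query char must occur in `input`, in order,
// but not necessarily adjacent — the logic of the Fuzzy arm above.
fn fuzzy_matches(input: &str, query: &str) -> bool {
    let mut input_chars = input.chars();
    for query_char in query.chars() {
        if input_chars.find(|&it| it == query_char).is_none() {
            return false;
        }
    }
    true
}

fn main() {
    assert!(fuzzy_matches("format_function", "fmt")); // f..m..t in order
    assert!(!fuzzy_matches("tmf", "fmt")); // right letters, wrong order
}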
@@ -14,8 +14,8 @@ use stdx::format_to;
 use syntax::ast;
 
 use crate::{
-    db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId, HasModule,
-    ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
+    db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId,
+    ExternCrateId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
 };
 
 #[derive(Copy, Clone, Debug)]
@@ -50,6 +50,7 @@ pub struct ItemScope {
     unnamed_consts: Vec<ConstId>,
     /// Traits imported via `use Trait as _;`.
     unnamed_trait_imports: FxHashMap<TraitId, Visibility>,
+    extern_crate_decls: Vec<ExternCrateId>,
     /// Macros visible in current module in legacy textual scope
     ///
     /// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched in first.
@@ -188,7 +189,11 @@ impl ItemScope {
     }
 
     pub(crate) fn define_impl(&mut self, imp: ImplId) {
-        self.impls.push(imp)
+        self.impls.push(imp);
+    }
+
+    pub(crate) fn define_extern_crate_decl(&mut self, extern_crate: ExternCrateId) {
+        self.extern_crate_decls.push(extern_crate);
     }
 
     pub(crate) fn define_unnamed_const(&mut self, konst: ConstId) {
@@ -397,6 +402,7 @@ impl ItemScope {
             legacy_macros,
             attr_macros,
             derive_macros,
+            extern_crate_decls,
         } = self;
         types.shrink_to_fit();
         values.shrink_to_fit();
@@ -409,6 +415,7 @@ impl ItemScope {
         legacy_macros.shrink_to_fit();
         attr_macros.shrink_to_fit();
         derive_macros.shrink_to_fit();
+        extern_crate_decls.shrink_to_fit();
     }
 }
@@ -46,7 +46,7 @@ use ast::{AstNode, HasName, StructKind};
 use base_db::CrateId;
 use either::Either;
 use hir_expand::{
-    ast_id_map::FileAstId,
+    ast_id_map::{AstIdNode, FileAstId},
     attrs::RawAttrs,
     hygiene::Hygiene,
     name::{name, AsName, Name},
@@ -314,7 +314,7 @@ from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>), Param(Id

 /// Trait implemented by all item nodes in the item tree.
 pub trait ItemTreeNode: Clone {
-    type Source: AstNode + Into<ast::Item>;
+    type Source: AstIdNode + Into<ast::Item>;

     fn ast_id(&self) -> FileAstId<Self::Source>;

@@ -52,7 +52,7 @@ struct Printer<'a> {
     needs_indent: bool,
 }

-impl<'a> Printer<'a> {
+impl Printer<'_> {
     fn indented(&mut self, f: impl FnOnce(&mut Self)) {
         self.indent_level += 1;
         wln!(self);
@@ -572,7 +572,7 @@ impl<'a> Printer<'a> {
     }
 }

-impl<'a> Write for Printer<'a> {
+impl Write for Printer<'_> {
     fn write_str(&mut self, s: &str) -> fmt::Result {
         for line in s.split_inclusive('\n') {
             if self.needs_indent {

@@ -180,15 +180,15 @@ impl LangItems {
         T: Into<AttrDefId> + Copy,
     {
         let _p = profile::span("collect_lang_item");
-        if let Some(lang_item) = lang_attr(db, item) {
+        if let Some(lang_item) = db.lang_attr(item.into()) {
             self.items.entry(lang_item).or_insert_with(|| constructor(item));
         }
     }
 }

-pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Option<LangItem> {
-    let attrs = db.attrs(item.into());
-    attrs.by_key("lang").string_value().cloned().and_then(|it| LangItem::from_str(&it))
+pub(crate) fn lang_attr_query(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> {
+    let attrs = db.attrs(item);
+    attrs.by_key("lang").string_value().and_then(|it| LangItem::from_str(&it))
 }

 pub enum GenericRequirement {

@@ -64,7 +64,7 @@ use std::{

 use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
 use hir_expand::{
-    ast_id_map::FileAstId,
+    ast_id_map::{AstIdNode, FileAstId},
     attrs::{Attr, AttrId, AttrInput},
     builtin_attr_macro::BuiltinAttrExpander,
     builtin_derive_macro::BuiltinDeriveExpander,
@@ -88,8 +88,8 @@ use crate::{
     builtin_type::BuiltinType,
     data::adt::VariantData,
     item_tree::{
-        Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, Static,
-        Struct, Trait, TraitAlias, TypeAlias, Union,
+        Const, Enum, ExternCrate, Function, Impl, Import, ItemTreeId, ItemTreeNode, MacroDef,
+        MacroRules, Static, Struct, Trait, TraitAlias, TypeAlias, Union,
     },
 };

@@ -145,24 +145,28 @@ pub struct ModuleId {
 }

 impl ModuleId {
-    pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc<DefMap> {
+    pub fn def_map(self, db: &dyn db::DefDatabase) -> Arc<DefMap> {
         match self.block {
             Some(block) => db.block_def_map(block),
             None => db.crate_def_map(self.krate),
         }
     }

-    pub fn krate(&self) -> CrateId {
+    pub fn krate(self) -> CrateId {
         self.krate
     }

-    pub fn containing_module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
+    pub fn containing_module(self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
         self.def_map(db).containing_module(self.local_id)
     }

-    pub fn containing_block(&self) -> Option<BlockId> {
+    pub fn containing_block(self) -> Option<BlockId> {
         self.block
     }
+
+    pub fn is_block_module(self) -> bool {
+        self.block.is_some() && self.local_id == DefMap::ROOT
+    }
 }

 /// An ID of a module, **local** to a `DefMap`.
@@ -313,6 +317,16 @@ pub struct ImplId(salsa::InternId);
 type ImplLoc = ItemLoc<Impl>;
 impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);

+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ImportId(salsa::InternId);
+type ImportLoc = ItemLoc<Import>;
+impl_intern!(ImportId, ImportLoc, intern_import, lookup_intern_import);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ExternCrateId(salsa::InternId);
+type ExternCrateLoc = ItemLoc<ExternCrate>;
+impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate);
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
 pub struct ExternBlockId(salsa::InternId);
 type ExternBlockLoc = ItemLoc<ExternBlock>;
@@ -392,14 +406,14 @@ impl TypeParamId {

 impl TypeParamId {
     /// Caller should check if this toc id really belongs to a type
-    pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
-        Self(x)
+    pub fn from_unchecked(it: TypeOrConstParamId) -> Self {
+        Self(it)
     }
 }

 impl From<TypeParamId> for TypeOrConstParamId {
-    fn from(x: TypeParamId) -> Self {
-        x.0
+    fn from(it: TypeParamId) -> Self {
+        it.0
     }
 }

@@ -418,14 +432,14 @@ impl ConstParamId {

 impl ConstParamId {
     /// Caller should check if this toc id really belongs to a const
-    pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
-        Self(x)
+    pub fn from_unchecked(it: TypeOrConstParamId) -> Self {
+        Self(it)
     }
 }

 impl From<ConstParamId> for TypeOrConstParamId {
-    fn from(x: ConstParamId) -> Self {
-        x.0
+    fn from(it: ConstParamId) -> Self {
+        it.0
     }
 }

@@ -548,14 +562,14 @@ pub enum TypeOwnerId {
 impl TypeOwnerId {
     fn as_generic_def_id(self) -> Option<GenericDefId> {
         Some(match self {
-            TypeOwnerId::FunctionId(x) => GenericDefId::FunctionId(x),
-            TypeOwnerId::ConstId(x) => GenericDefId::ConstId(x),
-            TypeOwnerId::AdtId(x) => GenericDefId::AdtId(x),
-            TypeOwnerId::TraitId(x) => GenericDefId::TraitId(x),
-            TypeOwnerId::TraitAliasId(x) => GenericDefId::TraitAliasId(x),
-            TypeOwnerId::TypeAliasId(x) => GenericDefId::TypeAliasId(x),
-            TypeOwnerId::ImplId(x) => GenericDefId::ImplId(x),
-            TypeOwnerId::EnumVariantId(x) => GenericDefId::EnumVariantId(x),
+            TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it),
+            TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it),
+            TypeOwnerId::AdtId(it) => GenericDefId::AdtId(it),
+            TypeOwnerId::TraitId(it) => GenericDefId::TraitId(it),
+            TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it),
+            TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
+            TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
+            TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it),
             TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => {
                 return None
             }
@@ -578,15 +592,15 @@ impl_from!(
     for TypeOwnerId
 );

-// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let x: Type = _; }`)
+// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let it: Type = _; }`)
 impl From<DefWithBodyId> for TypeOwnerId {
     fn from(value: DefWithBodyId) -> Self {
         match value {
-            DefWithBodyId::FunctionId(x) => x.into(),
-            DefWithBodyId::StaticId(x) => x.into(),
-            DefWithBodyId::ConstId(x) => x.into(),
-            DefWithBodyId::InTypeConstId(x) => x.into(),
-            DefWithBodyId::VariantId(x) => x.into(),
+            DefWithBodyId::FunctionId(it) => it.into(),
+            DefWithBodyId::StaticId(it) => it.into(),
+            DefWithBodyId::ConstId(it) => it.into(),
+            DefWithBodyId::InTypeConstId(it) => it.into(),
+            DefWithBodyId::VariantId(it) => it.into(),
         }
     }
 }
@@ -594,14 +608,14 @@ impl From<DefWithBodyId> for TypeOwnerId {
 impl From<GenericDefId> for TypeOwnerId {
     fn from(value: GenericDefId) -> Self {
         match value {
-            GenericDefId::FunctionId(x) => x.into(),
-            GenericDefId::AdtId(x) => x.into(),
-            GenericDefId::TraitId(x) => x.into(),
-            GenericDefId::TraitAliasId(x) => x.into(),
-            GenericDefId::TypeAliasId(x) => x.into(),
-            GenericDefId::ImplId(x) => x.into(),
-            GenericDefId::EnumVariantId(x) => x.into(),
-            GenericDefId::ConstId(x) => x.into(),
+            GenericDefId::FunctionId(it) => it.into(),
+            GenericDefId::AdtId(it) => it.into(),
+            GenericDefId::TraitId(it) => it.into(),
+            GenericDefId::TraitAliasId(it) => it.into(),
+            GenericDefId::TypeAliasId(it) => it.into(),
+            GenericDefId::ImplId(it) => it.into(),
+            GenericDefId::EnumVariantId(it) => it.into(),
+            GenericDefId::ConstId(it) => it.into(),
         }
     }
 }
@@ -716,7 +730,7 @@ impl GeneralConstId {
                 .const_data(const_id)
                 .name
                 .as_ref()
-                .and_then(|x| x.as_str())
+                .and_then(|it| it.as_str())
                 .unwrap_or("_")
                 .to_owned(),
             GeneralConstId::ConstBlockId(id) => format!("{{anonymous const {id:?}}}"),
@@ -821,6 +835,7 @@ pub enum AttrDefId {
     ImplId(ImplId),
     GenericParamId(GenericParamId),
     ExternBlockId(ExternBlockId),
+    ExternCrateId(ExternCrateId),
 }

 impl_from!(
@@ -835,7 +850,8 @@ impl_from!(
     TypeAliasId,
     MacroId(Macro2Id, MacroRulesId, ProcMacroId),
     ImplId,
-    GenericParamId
+    GenericParamId,
+    ExternCrateId
     for AttrDefId
 );

@@ -927,6 +943,12 @@ impl HasModule for AdtId {
     }
 }

+impl HasModule for ExternCrateId {
+    fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+        self.lookup(db).container
+    }
+}
+
 impl HasModule for VariantId {
     fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
         match self {
@@ -950,17 +972,17 @@ impl HasModule for MacroId {
 impl HasModule for TypeOwnerId {
     fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
         match self {
-            TypeOwnerId::FunctionId(x) => x.lookup(db).module(db),
-            TypeOwnerId::StaticId(x) => x.lookup(db).module(db),
-            TypeOwnerId::ConstId(x) => x.lookup(db).module(db),
-            TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.module(db),
-            TypeOwnerId::AdtId(x) => x.module(db),
-            TypeOwnerId::TraitId(x) => x.lookup(db).container,
-            TypeOwnerId::TraitAliasId(x) => x.lookup(db).container,
-            TypeOwnerId::TypeAliasId(x) => x.lookup(db).module(db),
-            TypeOwnerId::ImplId(x) => x.lookup(db).container,
-            TypeOwnerId::EnumVariantId(x) => x.parent.lookup(db).container,
-            TypeOwnerId::ModuleId(x) => *x,
+            TypeOwnerId::FunctionId(it) => it.lookup(db).module(db),
+            TypeOwnerId::StaticId(it) => it.lookup(db).module(db),
+            TypeOwnerId::ConstId(it) => it.lookup(db).module(db),
+            TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.module(db),
+            TypeOwnerId::AdtId(it) => it.module(db),
+            TypeOwnerId::TraitId(it) => it.lookup(db).container,
+            TypeOwnerId::TraitAliasId(it) => it.lookup(db).container,
+            TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db),
+            TypeOwnerId::ImplId(it) => it.lookup(db).container,
+            TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container,
+            TypeOwnerId::ModuleId(it) => *it,
         }
     }
 }
@@ -1050,6 +1072,7 @@ impl AttrDefId {
                     .krate
             }
             AttrDefId::MacroId(it) => it.module(db).krate,
+            AttrDefId::ExternCrateId(it) => it.lookup(db).container.krate,
         }
     }
 }
@@ -1101,12 +1124,12 @@ impl AsMacroCall for InFile<&ast::MacroCall> {

 /// Helper wrapper for `AstId` with `ModPath`
 #[derive(Clone, Debug, Eq, PartialEq)]
-struct AstIdWithPath<T: ast::AstNode> {
+struct AstIdWithPath<T: AstIdNode> {
     ast_id: AstId<T>,
     path: path::ModPath,
 }

-impl<T: ast::AstNode> AstIdWithPath<T> {
+impl<T: AstIdNode> AstIdWithPath<T> {
     fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
         AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
     }

@@ -1,5 +1,9 @@
 //! Context for lowering paths.
-use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile};
+use hir_expand::{
+    ast_id_map::{AstIdMap, AstIdNode},
+    hygiene::Hygiene,
+    AstId, HirFileId, InFile,
+};
 use once_cell::unsync::OnceCell;
 use syntax::ast;
 use triomphe::Arc;
@@ -37,7 +41,7 @@ impl<'a> LowerCtx<'a> {
         Path::from_src(ast, self)
     }

-    pub(crate) fn ast_id<N: syntax::AstNode>(&self, item: &N) -> Option<AstId<N>> {
+    pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {
         let &(file_id, ref ast_id_map) = self.ast_id_map.as_ref()?;
         let ast_id_map = ast_id_map.get_or_init(|| self.db.ast_id_map(file_id));
         Some(InFile::new(file_id, ast_id_map.ast_id(item)))

@@ -278,6 +278,44 @@ impl < > core::cmp::Eq for Command< > where {}"#]],
     );
 }

+#[test]
+fn test_partial_eq_expand_with_derive_const() {
+    // FIXME: actually expand with const
+    check(
+        r#"
+//- minicore: derive, eq
+#[derive_const(PartialEq, Eq)]
+enum Command {
+    Move { x: i32, y: i32 },
+    Do(&'static str),
+    Jump,
+}
+"#,
+        expect![[r#"
+#[derive_const(PartialEq, Eq)]
+enum Command {
+    Move { x: i32, y: i32 },
+    Do(&'static str),
+    Jump,
+}
+
+impl < > core::cmp::PartialEq for Command< > where {
+    fn eq(&self , other: &Self ) -> bool {
+        match (self , other) {
+            (Command::Move {
+                x: x_self, y: y_self,
+            }
+            , Command::Move {
+                x: x_other, y: y_other,
+            }
+            )=>x_self.eq(x_other) && y_self.eq(y_other), (Command::Do(f0_self, ), Command::Do(f0_other, ))=>f0_self.eq(f0_other), (Command::Jump, Command::Jump)=>true , _unused=>false
+        }
+    }
+}
+impl < > core::cmp::Eq for Command< > where {}"#]],
+    );
+}
+
 #[test]
 fn test_partial_ord_expand() {
     check(
@@ -378,6 +416,44 @@ fn test_hash_expand() {
 //- minicore: derive, hash
 use core::hash::Hash;

+#[derive(Hash)]
+struct Foo {
+    x: i32,
+    y: u64,
+    z: (i32, u64),
+}
+"#,
+        expect![[r#"
+use core::hash::Hash;
+
+#[derive(Hash)]
+struct Foo {
+    x: i32,
+    y: u64,
+    z: (i32, u64),
+}
+
+impl < > core::hash::Hash for Foo< > where {
+    fn hash<H: core::hash::Hasher>(&self , ra_expand_state: &mut H) {
+        match self {
+            Foo {
+                x: x, y: y, z: z,
+            }
+            => {
+                x.hash(ra_expand_state);
+                y.hash(ra_expand_state);
+                z.hash(ra_expand_state);
+            }
+            ,
+        }
+    }
+}"#]],
+    );
+    check(
+        r#"
+//- minicore: derive, hash
+use core::hash::Hash;
+
 #[derive(Hash)]
 enum Command {
     Move { x: i32, y: i32 },

@@ -201,7 +201,7 @@ macro_rules! format_args {
 }

 fn main() {
-    ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(arg1(a, b, c)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(arg2), ::core::fmt::Debug::fmt), ]);
+    ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(arg1(a, b, c)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(arg2), ::core::fmt::Debug::fmt), ]);
 }
 "##]],
     );
@@ -235,11 +235,11 @@ macro_rules! format_args {

 fn main() {
     /* error: no rule matches input tokens */;
-    /* error: no rule matches input tokens */;
-    /* error: no rule matches input tokens */;
-    /* error: no rule matches input tokens */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::Argument::new(&(), ::core::fmt::Display::fmt), ]);
-    /* error: no rule matches input tokens */;
-    ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::Argument::new(&(5), ::core::fmt::Display::fmt), ]);
+    /* error: expected expression */;
+    /* error: expected expression, expected COMMA */;
+    /* error: expected expression */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]);
+    /* error: expected expression, expected R_PAREN */;
+    ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(5), ::core::fmt::Display::fmt), ]);
 }
 "##]],
     );
@@ -267,7 +267,7 @@ macro_rules! format_args {
 }

 fn main() {
-    ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(a::<A, B>()), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(b), ::core::fmt::Debug::fmt), ]);
+    ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a::<A, B>()), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(b), ::core::fmt::Debug::fmt), ]);
 }
 "##]],
     );
@@ -300,7 +300,7 @@ macro_rules! format_args {
 }

 fn main() {
-    ::core::fmt::Arguments::new_v1(&[r#""#, r#",mismatch,""#, r#"",""#, r#"""#, ], &[::core::fmt::Argument::new(&(location_csv_pat(db, &analysis, vfs, &sm, pat_id)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(mismatch.expected.display(db)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(mismatch.actual.display(db)), ::core::fmt::Display::fmt), ]);
+    ::core::fmt::Arguments::new_v1(&[r#""#, r#",mismatch,""#, r#"",""#, r#"""#, ], &[::core::fmt::ArgumentV1::new(&(location_csv_pat(db, &analysis, vfs, &sm, pat_id)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(mismatch.expected.display(db)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(mismatch.actual.display(db)), ::core::fmt::Display::fmt), ]);
 }
 "##]],
     );
@@ -334,7 +334,7 @@ macro_rules! format_args {
 }

 fn main() {
-    ::core::fmt::Arguments::new_v1(&["xxx", "y", "zzz", ], &[::core::fmt::Argument::new(&(2), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(b), ::core::fmt::Debug::fmt), ]);
+    ::core::fmt::Arguments::new_v1(&["xxx", "y", "zzz", ], &[::core::fmt::ArgumentV1::new(&(2), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(b), ::core::fmt::Debug::fmt), ]);
 }
 "##]],
     );
@@ -364,8 +364,8 @@ macro_rules! format_args {

 fn main() {
     let _ =
-        /* error: no rule matches input tokens *//* parse error: expected field name or number */
-        ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(), ::core::fmt::Debug::fmt), ]);
+        /* error: expected field name or number *//* parse error: expected field name or number */
+        ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(), ::core::fmt::Debug::fmt), ]);
 }
 "##]],
     );

@@ -98,6 +98,42 @@ fn#19 main#20(#21)#21 {#22
 "##]],
     );
 }

+#[test]
+fn token_mapping_eager() {
+    check(
+        r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+macro_rules! identity {
+    ($expr:expr) => { $expr };
+}
+
+fn main(foo: ()) {
+    format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+}
+
+"#,
+        expect![[r##"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+macro_rules! identity {
+    ($expr:expr) => { $expr };
+}
+
+fn main(foo: ()) {
+    // format_args/*+tokenids*/!("{} {} {}"#1,#3 format_args!("{}", 0#10),#12 foo#13,#14 identity!(10#18),#21 "bar"#22)
+    ::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(�[#4294967295""#4294967295,#4294967295 " "#4294967295,#4294967295 " "#4294967295,#4294967295 ]#4294967295,#4294967295 �[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(�(::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(�[#4294967295""#4294967295,#4294967295 ]#4294967295,#4294967295 �[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(�(#42949672950#10)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(�(#4294967295foo#13)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(�(#429496729510#18)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295
+}
+
+"##]],
+    );
+}
+
 #[test]
 fn float_field_access_macro_input() {
     check(

@@ -20,8 +20,8 @@ use ::mbe::TokenMap;
 use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
 use expect_test::Expect;
 use hir_expand::{
-    db::{ExpandDatabase, TokenExpander},
-    AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
+    db::{DeclarativeMacroExpander, ExpandDatabase},
+    AstId, InFile, MacroFile,
 };
 use stdx::format_to;
 use syntax::{
@@ -100,12 +100,10 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         let call_offset = macro_.syntax().text_range().start().into();
         let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
         let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
-        let kind = MacroDefKind::Declarative(ast_id);

-        let macro_def = db
-            .macro_def(MacroDefId { krate, kind, local_inner: false, allow_internal_unsafe: false })
-            .unwrap();
-        if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def {
+        let DeclarativeMacroExpander { mac, def_site_token_map } =
+            &*db.decl_macro_expander(krate, ast_id);
+        assert_eq!(mac.err(), None);
         let tt = match &macro_ {
             ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
             ast::Macro::MacroDef(_) => unimplemented!(""),
@@ -126,7 +124,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
             format!("// call ids will be shifted by {:?}\n", mac.shift()),
         ));
     }
-    }

     for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
         let macro_call = InFile::new(source.file_id, &macro_call);
@@ -190,7 +187,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         let range: Range<usize> = range.into();

         if show_token_ids {
-            if let Some((tree, map, _)) = arg.as_deref() {
+            if let Some((tree, map, _)) = arg.value.as_deref() {
                 let tt_range = call.token_tree().unwrap().syntax().text_range();
                 let mut ranges = Vec::new();
                 extract_id_ranges(&mut ranges, map, tree);
@@ -239,7 +236,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream

     for impl_id in def_map[local_id].scope.impls() {
         let src = impl_id.lookup(&db).source(&db);
-        if src.file_id.is_builtin_derive(&db).is_some() {
+        if src.file_id.is_builtin_derive(&db) {
             let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
             format_to!(expanded_text, "\n{}", pp)
         }

@@ -60,7 +60,7 @@ mod tests;
 use std::{cmp::Ord, ops::Deref};

 use base_db::{CrateId, Edition, FileId, ProcMacroKind};
-use hir_expand::{name::Name, InFile, MacroCallId, MacroDefId};
+use hir_expand::{name::Name, HirFileId, InFile, MacroCallId, MacroDefId};
 use itertools::Itertools;
 use la_arena::Arena;
 use profile::Count;
@@ -196,6 +196,10 @@ impl BlockRelativeModuleId {
     fn into_module(self, krate: CrateId) -> ModuleId {
         ModuleId { krate, block: self.block, local_id: self.local_id }
     }
+
+    fn is_block_module(self) -> bool {
+        self.block.is_some() && self.local_id == DefMap::ROOT
+    }
 }

 impl std::ops::Index<LocalModuleId> for DefMap {
@@ -278,7 +282,9 @@ pub struct ModuleData {
     pub origin: ModuleOrigin,
     /// Declared visibility of this module.
     pub visibility: Visibility,
-    /// Always [`None`] for block modules
+    /// Parent module in the same `DefMap`.
+    ///
+    /// [`None`] for block modules because they are always its `DefMap`'s root.
     pub parent: Option<LocalModuleId>,
     pub children: FxHashMap<Name, LocalModuleId>,
     pub scope: ItemScope,
@@ -626,6 +632,17 @@ impl ModuleData {
         self.origin.definition_source(db)
     }

+    /// Same as [`definition_source`] but only returns the file id to prevent parsing the AST.
+    pub fn definition_source_file_id(&self) -> HirFileId {
+        match self.origin {
+            ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
+                definition.into()
+            }
+            ModuleOrigin::Inline { definition, .. } => definition.file_id,
+            ModuleOrigin::BlockExpr { block } => block.file_id,
+        }
+    }
+
     /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
     /// `None` for the crate root or block.
     pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> {

@@ -52,10 +52,10 @@ use crate::{
     tt,
     visibility::{RawVisibility, Visibility},
     AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId,
-    ExternBlockLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId,
-    Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId,
-    ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc,
-    TypeAliasLoc, UnionLoc, UnresolvedMacro,
+    ExternBlockLoc, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, ImportLoc, Intern,
+    ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId,
+    MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc,
+    TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro,
 };

 static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
@@ -156,10 +156,9 @@ struct Import {
     alias: Option<ImportAlias>,
     visibility: RawVisibility,
     kind: ImportKind,
-    is_prelude: bool,
-    is_extern_crate: bool,
-    is_macro_use: bool,
     source: ImportSource,
+    is_prelude: bool,
+    is_macro_use: bool,
 }

 impl Import {
@@ -168,26 +167,23 @@ impl Import {
         krate: CrateId,
         tree: &ItemTree,
         id: ItemTreeId<item_tree::Import>,
-    ) -> Vec<Self> {
+        mut cb: impl FnMut(Self),
+    ) {
         let it = &tree[id.value];
         let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
         let visibility = &tree[it.visibility];
         let is_prelude = attrs.by_key("prelude_import").exists();

-        let mut res = Vec::new();
         it.use_tree.expand(|idx, path, kind, alias| {
-            res.push(Self {
+            cb(Self {
                 path,
                 alias,
                 visibility: visibility.clone(),
                 kind,
                 is_prelude,
-                is_extern_crate: false,
                 is_macro_use: false,
                 source: ImportSource::Import { id, use_tree: idx },
             });
         });
-        res
     }

     fn from_extern_crate(
@@ -205,7 +201,6 @@ impl Import {
             visibility: visibility.clone(),
             kind: ImportKind::Plain,
             is_prelude: false,
-            is_extern_crate: true,
             is_macro_use: attrs.by_key("macro_use").exists(),
             source: ImportSource::ExternCrate(id),
         }
@@ -776,7 +771,7 @@ impl DefCollector<'_> {
         let _p = profile::span("resolve_import")
             .detail(|| format!("{}", import.path.display(self.db.upcast())));
         tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
-        if import.is_extern_crate {
+        if matches!(import.source, ImportSource::ExternCrate { .. }) {
             let name = import
                 .path
                 .as_ident()
@@ -813,11 +808,8 @@ impl DefCollector<'_> {
                 }
             }

-            // Check whether all namespace is resolved
-            if def.take_types().is_some()
-                && def.take_values().is_some()
-                && def.take_macros().is_some()
-            {
+            // Check whether all namespaces are resolved.
+            if def.is_full() {
                 PartialResolvedImport::Resolved(def)
             } else {
                 PartialResolvedImport::Indeterminate(def)
@@ -826,7 +818,7 @@ impl DefCollector<'_> {
     }

     fn resolve_extern_crate(&self, name: &Name) -> Option<CrateRootModuleId> {
-        if *name == name!(self) {
+        if *name == name![self] {
             cov_mark::hit!(extern_crate_self_as);
             Some(self.def_map.crate_root())
         } else {
@@ -867,7 +859,7 @@ impl DefCollector<'_> {
         tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);

         // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
-        if import.is_extern_crate
+        if matches!(import.source, ImportSource::ExternCrate { .. })
             && self.def_map.block.is_none()
             && module_id == DefMap::ROOT
         {
@@ -1585,21 +1577,34 @@ impl ModCollector<'_, '_> {
             match item {
                 ModItem::Mod(m) => self.collect_module(m, &attrs),
                 ModItem::Import(import_id) => {
-                    let imports = Import::from_use(
+                    let _import_id = ImportLoc {
+                        container: module,
+                        id: ItemTreeId::new(self.tree_id, import_id),
+                    }
+                    .intern(db);
+                    Import::from_use(
                         db,
                         krate,
                         self.item_tree,
                         ItemTreeId::new(self.tree_id, import_id),
-                    );
-                    self.def_collector.unresolved_imports.extend(imports.into_iter().map(
-                        |import| ImportDirective {
+                        |import| {
+                            self.def_collector.unresolved_imports.push(ImportDirective {
                                 module_id: self.module_id,
                                 import,
                                 status: PartialResolvedImport::Unresolved,
+                            });
                         },
-                    ));
+                    )
                 }
                 ModItem::ExternCrate(import_id) => {
+                    let extern_crate_id = ExternCrateLoc {
+                        container: module,
+                        id: ItemTreeId::new(self.tree_id, import_id),
+                    }
+                    .intern(db);
+                    self.def_collector.def_map.modules[self.module_id]
+                        .scope
+                        .define_extern_crate_decl(extern_crate_id);
                     self.def_collector.unresolved_imports.push(ImportDirective {
                         module_id: self.module_id,
                         import: Import::from_extern_crate(
@@ -2230,8 +2235,12 @@ impl ModCollector<'_, '_> {
     }

     fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) {
-        let Some((source, target)) = Self::borrow_modules(self.def_collector.def_map.modules.as_mut(), module_id, self.module_id) else {
-            return
+        let Some((source, target)) = Self::borrow_modules(
+            self.def_collector.def_map.modules.as_mut(),
+            module_id,
+            self.module_id,
+        ) else {
+            return;
         };

         for (name, macs) in source.scope.legacy_macros() {
@@ -2271,7 +2280,7 @@ impl ModCollector<'_, '_> {
     fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) {
         let ast_id = item.ast_id(self.item_tree);

-        let ast_id = InFile::new(self.file_id(), ast_id.upcast());
+        let ast_id = InFile::new(self.file_id(), ast_id.erase());
         self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
             self.module_id,
             ast_id,

@@ -2,12 +2,9 @@

 use base_db::CrateId;
 use cfg::{CfgExpr, CfgOptions};
-use hir_expand::{attrs::AttrId, MacroCallKind};
+use hir_expand::{attrs::AttrId, ErasedAstId, MacroCallKind};
 use la_arena::Idx;
-use syntax::{
-    ast::{self, AnyHasAttrs},
-    SyntaxError,
-};
+use syntax::{ast, SyntaxError};

 use crate::{
     item_tree::{self, ItemTreeId},
@@ -24,7 +21,7 @@ pub enum DefDiagnosticKind {

     UnresolvedImport { id: ItemTreeId<item_tree::Import>, index: Idx<ast::UseTree> },

-    UnconfiguredCode { ast: AstId<AnyHasAttrs>, cfg: CfgExpr, opts: CfgOptions },
+    UnconfiguredCode { ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },

     UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId },

@@ -81,7 +78,7 @@ impl DefDiagnostic {

     pub fn unconfigured_code(
         container: LocalModuleId,
-        ast: AstId<ast::AnyHasAttrs>,
+        ast: ErasedAstId,
         cfg: CfgExpr,
         opts: CfgOptions,
     ) -> Self {

@@ -12,11 +12,12 @@

 use base_db::Edition;
 use hir_expand::name::Name;
+use triomphe::Arc;

 use crate::{
     db::DefDatabase,
     item_scope::BUILTIN_SCOPE,
-    nameres::{sub_namespace_match, BuiltinShadowMode, DefMap, MacroSubNs},
+    nameres::{sub_namespace_match, BlockInfo, BuiltinShadowMode, DefMap, MacroSubNs},
     path::{ModPath, PathKind},
     per_ns::PerNs,
     visibility::{RawVisibility, Visibility},
@@ -159,13 +160,15 @@ impl DefMap {
                 (None, new) => new,
             };

-            match &current_map.block {
-                Some(block) => {
+            match current_map.block {
+                Some(block) if original_module == Self::ROOT => {
+                    // Block modules "inherit" names from their parent module.
                     original_module = block.parent.local_id;
                     arc = block.parent.def_map(db, current_map.krate);
-                    current_map = &*arc;
+                    current_map = &arc;
                 }
-                None => return result,
+                // Proper (non-block) modules, including those in block `DefMap`s, don't.
+                _ => return result,
             }
         }
     }
@@ -189,7 +192,7 @@ impl DefMap {
         ));

         let mut segments = path.segments().iter().enumerate();
-        let mut curr_per_ns: PerNs = match path.kind {
+        let mut curr_per_ns = match path.kind {
             PathKind::DollarCrate(krate) => {
                 if krate == self.krate {
                     cov_mark::hit!(macro_dollar_crate_self);
@@ -241,51 +244,54 @@ impl DefMap {
                 )
             }
             PathKind::Super(lvl) => {
-                let mut module = original_module;
-                for i in 0..lvl {
-                    match self.modules[module].parent {
-                        Some(it) => module = it,
-                        None => match &self.block {
-                            Some(block) => {
-                                // Look up remaining path in parent `DefMap`
-                                let new_path = ModPath::from_segments(
-                                    PathKind::Super(lvl - i),
-                                    path.segments().to_vec(),
-                                );
-                                tracing::debug!(
-                                    "`super` path: {} -> {} in parent map",
-                                    path.display(db.upcast()),
-                                    new_path.display(db.upcast())
-                                );
-                                return block
-                                    .parent
-                                    .def_map(db, self.krate)
-                                    .resolve_path_fp_with_macro(
+                let mut local_id = original_module;
+                let mut ext;
+                let mut def_map = self;
+
+                // Adjust `local_id` to `self`, i.e. the nearest non-block module.
+                if def_map.module_id(local_id).is_block_module() {
+                    (ext, local_id) = adjust_to_nearest_non_block_module(db, def_map, local_id);
+                    def_map = &ext;
+                }
+
+                // Go up the module tree but skip block modules as `super` always refers to the
+                // nearest non-block module.
+                for _ in 0..lvl {
+                    // Loop invariant: at the beginning of each loop, `local_id` must refer to a
+                    // non-block module.
+                    if let Some(parent) = def_map.modules[local_id].parent {
+                        local_id = parent;
+                        if def_map.module_id(local_id).is_block_module() {
+                            (ext, local_id) =
+                                adjust_to_nearest_non_block_module(db, def_map, local_id);
+                            def_map = &ext;
+                        }
+                    } else {
+                        stdx::always!(def_map.block.is_none());
+                        tracing::debug!("super path in root module");
+                        return ResolvePathResult::empty(ReachedFixedPoint::Yes);
+                    }
+                }
+
+                let module = def_map.module_id(local_id);
+                stdx::never!(module.is_block_module());
+
+                if self.block != def_map.block {
+                    // If we have a different `DefMap` from `self` (the original `DefMap` we started
+                    // with), resolve the remaining path segments in that `DefMap`.
+                    let path =
+                        ModPath::from_segments(PathKind::Super(0), path.segments().iter().cloned());
+                    return def_map.resolve_path_fp_with_macro(
                         db,
                         mode,
-                        block.parent.local_id,
-                        &new_path,
+                        local_id,
+                        &path,
                         shadow,
                         expected_macro_subns,
                     );
                 }
-                            None => {
-                                tracing::debug!("super path in root module");
-                                return ResolvePathResult::empty(ReachedFixedPoint::Yes);
-                            }
-                        },
-                    }
-                }

-                // Resolve `self` to the containing crate-rooted module if we're a block
-                self.with_ancestor_maps(db, module, &mut |def_map, module| {
-                    if def_map.block.is_some() {
-                        None // keep ascending
-                    } else {
-                        Some(PerNs::types(def_map.module_id(module).into(), Visibility::Public))
-                    }
-                })
-                .expect("block DefMap not rooted in crate DefMap")
+                PerNs::types(module.into(), Visibility::Public)
             }
             PathKind::Abs => {
                 // 2018-style absolute path -- only extern prelude
@@ -508,3 +514,27 @@ impl DefMap {
         }
     }
 }
+
+/// Given a block module, returns its nearest non-block module and the `DefMap` it belongs to.
+fn adjust_to_nearest_non_block_module(
+    db: &dyn DefDatabase,
+    def_map: &DefMap,
+    mut local_id: LocalModuleId,
+) -> (Arc<DefMap>, LocalModuleId) {
+    // INVARIANT: `local_id` in `def_map` must be a block module.
+    stdx::always!(def_map.module_id(local_id).is_block_module());
+
+    let mut ext;
+    // This needs to be a local variable due to our mighty lifetime.
+    let mut def_map = def_map;
+    loop {
+        let BlockInfo { parent, .. } = def_map.block.expect("block module without parent module");
+
+        ext = parent.def_map(db, def_map.krate);
+        def_map = &ext;
+        local_id = parent.local_id;
+        if !parent.is_block_module() {
+            return (ext, local_id);
+        }
+    }
+}

@@ -45,7 +45,7 @@ pub enum Path {
         /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`.
         generic_args: Option<Box<[Option<Interned<GenericArgs>>]>>,
     },
-    /// A link to a lang item. It is used in desugaring of things like `x?`. We can show these
+    /// A link to a lang item. It is used in desugaring of things like `it?`. We can show these
     /// links via a normal path since they might be private and not accessible in the usage place.
     LangItem(LangItemTarget),
 }
@@ -135,10 +135,7 @@ impl Path {

     pub fn segments(&self) -> PathSegments<'_> {
         let Path::Normal { mod_path, generic_args, .. } = self else {
-            return PathSegments {
-                segments: &[],
-                generic_args: None,
-            };
+            return PathSegments { segments: &[], generic_args: None };
         };
         let s =
             PathSegments { segments: mod_path.segments(), generic_args: generic_args.as_deref() };

@@ -74,8 +74,8 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
                 // <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
                 Some(trait_ref) => {
                     let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
-                        Path::from_src(trait_ref.path()?, ctx)? else
-                    {
+                        Path::from_src(trait_ref.path()?, ctx)?
+                    else {
                         return None;
                     };
                     let num_segments = mod_path.segments().len();

@@ -12,8 +12,8 @@ use crate::{
 };

 pub(crate) fn print_path(db: &dyn ExpandDatabase, path: &Path, buf: &mut dyn Write) -> fmt::Result {
-    if let Path::LangItem(x) = path {
-        return write!(buf, "$lang_item::{x:?}");
+    if let Path::LangItem(it) = path {
+        return write!(buf, "$lang_item::{it:?}");
     }
     match path.type_anchor() {
         Some(anchor) => {

@ -22,10 +22,10 @@ use crate::{
|
||||||
per_ns::PerNs,
|
per_ns::PerNs,
|
||||||
visibility::{RawVisibility, Visibility},
|
visibility::{RawVisibility, Visibility},
|
||||||
AdtId, AssocItemId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId,
|
AdtId, AssocItemId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId,
|
||||||
EnumVariantId, ExternBlockId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId,
|
EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId,
|
||||||
ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId,
|
HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId,
|
||||||
ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
|
MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId,
|
||||||
TypeOrConstParamId, TypeOwnerId, TypeParamId, VariantId,
|
TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, VariantId,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
|
@@ -186,12 +186,12 @@ impl Resolver {
             Path::LangItem(l) => {
                 return Some((
                     match *l {
-                        LangItemTarget::Union(x) => TypeNs::AdtId(x.into()),
-                        LangItemTarget::TypeAlias(x) => TypeNs::TypeAliasId(x),
-                        LangItemTarget::Struct(x) => TypeNs::AdtId(x.into()),
-                        LangItemTarget::EnumVariant(x) => TypeNs::EnumVariantId(x),
-                        LangItemTarget::EnumId(x) => TypeNs::AdtId(x.into()),
-                        LangItemTarget::Trait(x) => TypeNs::TraitId(x),
+                        LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
+                        LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it),
+                        LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()),
+                        LangItemTarget::EnumVariant(it) => TypeNs::EnumVariantId(it),
+                        LangItemTarget::EnumId(it) => TypeNs::AdtId(it.into()),
+                        LangItemTarget::Trait(it) => TypeNs::TraitId(it),
                         LangItemTarget::Function(_)
                         | LangItemTarget::ImplDef(_)
                         | LangItemTarget::Static(_) => return None,
@@ -273,10 +273,10 @@ impl Resolver {
             Path::Normal { mod_path, .. } => mod_path,
             Path::LangItem(l) => {
                 return Some(ResolveValueResult::ValueNs(match *l {
-                    LangItemTarget::Function(x) => ValueNs::FunctionId(x),
-                    LangItemTarget::Static(x) => ValueNs::StaticId(x),
-                    LangItemTarget::Struct(x) => ValueNs::StructId(x),
-                    LangItemTarget::EnumVariant(x) => ValueNs::EnumVariantId(x),
+                    LangItemTarget::Function(it) => ValueNs::FunctionId(it),
+                    LangItemTarget::Static(it) => ValueNs::StaticId(it),
+                    LangItemTarget::Struct(it) => ValueNs::StructId(it),
+                    LangItemTarget::EnumVariant(it) => ValueNs::EnumVariantId(it),
                     LangItemTarget::Union(_)
                     | LangItemTarget::ImplDef(_)
                     | LangItemTarget::TypeAlias(_)
@@ -425,14 +425,14 @@ impl Resolver {
     /// The shadowing is accounted for: in
     ///
     /// ```
-    /// let x = 92;
+    /// let it = 92;
     /// {
-    ///     let x = 92;
+    ///     let it = 92;
     ///     $0
     /// }
     /// ```
     ///
-    /// there will be only one entry for `x` in the result.
+    /// there will be only one entry for `it` in the result.
     ///
     /// The result is ordered *roughly* from the innermost scope to the
     /// outermost: when the name is introduced in two namespaces in two scopes,
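The doc comment above promises an innermost-first ordering with shadowed names collapsed to a single entry. A small stand-alone sketch of that walk, using a toy `Scope` type rather than rust-analyzer's real scope machinery:

```rust
use std::collections::HashSet;

struct Scope {
    bindings: Vec<(String, u32)>, // (name, binding id)
}

/// Walk scopes innermost-first; the first occurrence of a name wins,
/// so a shadowed name contributes exactly one entry to the result.
fn visible_bindings(scopes_innermost_first: &[Scope]) -> Vec<(String, u32)> {
    let mut seen = HashSet::new();
    let mut result = Vec::new();
    for scope in scopes_innermost_first {
        for (name, id) in &scope.bindings {
            if seen.insert(name.clone()) {
                result.push((name.clone(), *id));
            }
        }
    }
    result
}

fn main() {
    let scopes = vec![
        Scope { bindings: vec![("it".to_string(), 1)] }, // innermost
        Scope { bindings: vec![("it".to_string(), 0)] }, // outer, shadowed
    ];
    assert_eq!(visible_bindings(&scopes), vec![("it".to_string(), 1)]);
}
```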
@@ -1018,20 +1018,26 @@ impl HasResolver for ExternBlockId {
     }
 }
 
+impl HasResolver for ExternCrateId {
+    fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+        self.lookup(db).container.resolver(db)
+    }
+}
+
 impl HasResolver for TypeOwnerId {
     fn resolver(self, db: &dyn DefDatabase) -> Resolver {
         match self {
-            TypeOwnerId::FunctionId(x) => x.resolver(db),
-            TypeOwnerId::StaticId(x) => x.resolver(db),
-            TypeOwnerId::ConstId(x) => x.resolver(db),
-            TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.resolver(db),
-            TypeOwnerId::AdtId(x) => x.resolver(db),
-            TypeOwnerId::TraitId(x) => x.resolver(db),
-            TypeOwnerId::TraitAliasId(x) => x.resolver(db),
-            TypeOwnerId::TypeAliasId(x) => x.resolver(db),
-            TypeOwnerId::ImplId(x) => x.resolver(db),
-            TypeOwnerId::EnumVariantId(x) => x.resolver(db),
-            TypeOwnerId::ModuleId(x) => x.resolver(db),
+            TypeOwnerId::FunctionId(it) => it.resolver(db),
+            TypeOwnerId::StaticId(it) => it.resolver(db),
+            TypeOwnerId::ConstId(it) => it.resolver(db),
+            TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.resolver(db),
+            TypeOwnerId::AdtId(it) => it.resolver(db),
+            TypeOwnerId::TraitId(it) => it.resolver(db),
+            TypeOwnerId::TraitAliasId(it) => it.resolver(db),
+            TypeOwnerId::TypeAliasId(it) => it.resolver(db),
+            TypeOwnerId::ImplId(it) => it.resolver(db),
+            TypeOwnerId::EnumVariantId(it) => it.resolver(db),
+            TypeOwnerId::ModuleId(it) => it.resolver(db),
         }
     }
 }

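The new `ExternCrateId` impl uses the same delegation idiom as its neighbours: resolve names through the containing item. A toy version of the idiom, with hypothetical `Module`/`Resolver` stand-ins rather than the real `DefDatabase` API:

```rust
// Hypothetical, simplified stand-ins to show the delegation shape:
// an item resolves names via the scope of its container.
struct Resolver;

trait HasResolver {
    fn resolver(self) -> Resolver;
}

struct Module;
impl HasResolver for Module {
    fn resolver(self) -> Resolver {
        Resolver
    }
}

struct ExternCrateDecl {
    container: Module,
}
impl HasResolver for ExternCrateDecl {
    // Delegate: an `extern crate` declaration sees exactly what its
    // containing module sees.
    fn resolver(self) -> Resolver {
        self.container.resolver()
    }
}

fn main() {
    let decl = ExternCrateDecl { container: Module };
    let _resolver = decl.resolver();
}
```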
@@ -16,11 +16,9 @@ cov-mark = "2.0.0-pre.1"
 tracing = "0.1.35"
 either = "1.7.0"
 rustc-hash = "1.1.0"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
 itertools = "0.10.5"
-hashbrown = { version = "0.12.1", features = [
-    "inline-more",
-], default-features = false }
+hashbrown.workspace = true
 smallvec.workspace = true
 triomphe.workspace = true
 

@@ -18,47 +18,89 @@ use rustc_hash::FxHasher;
 use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
 
 /// `AstId` points to an AST node in a specific file.
-pub struct FileAstId<N: AstNode> {
+pub struct FileAstId<N: AstIdNode> {
     raw: ErasedFileAstId,
     covariant: PhantomData<fn() -> N>,
 }
 
-impl<N: AstNode> Clone for FileAstId<N> {
+impl<N: AstIdNode> Clone for FileAstId<N> {
     fn clone(&self) -> FileAstId<N> {
         *self
     }
 }
-impl<N: AstNode> Copy for FileAstId<N> {}
+impl<N: AstIdNode> Copy for FileAstId<N> {}
 
-impl<N: AstNode> PartialEq for FileAstId<N> {
+impl<N: AstIdNode> PartialEq for FileAstId<N> {
     fn eq(&self, other: &Self) -> bool {
         self.raw == other.raw
     }
 }
-impl<N: AstNode> Eq for FileAstId<N> {}
-impl<N: AstNode> Hash for FileAstId<N> {
+impl<N: AstIdNode> Eq for FileAstId<N> {}
+impl<N: AstIdNode> Hash for FileAstId<N> {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         self.raw.hash(hasher);
     }
 }
 
-impl<N: AstNode> fmt::Debug for FileAstId<N> {
+impl<N: AstIdNode> fmt::Debug for FileAstId<N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
     }
 }
 
-impl<N: AstNode> FileAstId<N> {
+impl<N: AstIdNode> FileAstId<N> {
     // Can't make this a From implementation because of coherence
-    pub fn upcast<M: AstNode>(self) -> FileAstId<M>
+    pub fn upcast<M: AstIdNode>(self) -> FileAstId<M>
     where
         N: Into<M>,
     {
         FileAstId { raw: self.raw, covariant: PhantomData }
     }
+
+    pub fn erase(self) -> ErasedFileAstId {
+        self.raw
+    }
 }
 
-type ErasedFileAstId = Idx<SyntaxNodePtr>;
+pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
 
+pub trait AstIdNode: AstNode {}
+macro_rules! register_ast_id_node {
+    (impl AstIdNode for $($ident:ident),+ ) => {
+        $(
+            impl AstIdNode for ast::$ident {}
+        )+
+        fn should_alloc_id(kind: syntax::SyntaxKind) -> bool {
+            $(
+                ast::$ident::can_cast(kind)
+            )||+
+        }
+    };
+}
+register_ast_id_node! {
+    impl AstIdNode for
+    Item,
+    Adt,
+    Enum,
+    Struct,
+    Union,
+    Const,
+    ExternBlock,
+    ExternCrate,
+    Fn,
+    Impl,
+    Macro,
+    MacroDef,
+    MacroRules,
+    MacroCall,
+    Module,
+    Static,
+    Trait,
+    TraitAlias,
+    TypeAlias,
+    Use,
+    AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg
+}
+
 /// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
 #[derive(Default)]
@@ -92,14 +134,7 @@ impl AstIdMap {
         // change parent's id. This means that, say, adding a new function to a
         // trait does not change ids of top-level items, which helps caching.
         bdfs(node, |it| {
-            let kind = it.kind();
-            if ast::Item::can_cast(kind)
-                || ast::BlockExpr::can_cast(kind)
-                || ast::Variant::can_cast(kind)
-                || ast::RecordField::can_cast(kind)
-                || ast::TupleField::can_cast(kind)
-                || ast::ConstArg::can_cast(kind)
-            {
+            if should_alloc_id(it.kind()) {
                 res.alloc(&it);
                 true
             } else {
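The two hunks above replace a hand-maintained chain of `can_cast` checks with a `should_alloc_id` predicate that `register_ast_id_node!` derives from the same list that receives the `AstIdNode` impls, so the set of id-carrying node kinds is defined exactly once. A self-contained sketch of that trick with toy types (the `Kind`/`Castable` names are invented for illustration):

```rust
// Toy node types standing in for `ast::Item`, `ast::BlockExpr`, etc.
#[derive(Clone, Copy, PartialEq)]
enum Kind {
    Item,
    BlockExpr,
    Whitespace,
}

trait Castable {
    fn can_cast(kind: Kind) -> bool;
}

struct Item;
struct BlockExpr;
impl Castable for Item {
    fn can_cast(kind: Kind) -> bool {
        kind == Kind::Item
    }
}
impl Castable for BlockExpr {
    fn can_cast(kind: Kind) -> bool {
        kind == Kind::BlockExpr
    }
}

// One macro invocation both marks the types and derives the predicate,
// mirroring how `register_ast_id_node!` emits `should_alloc_id`.
trait IdNode: Castable {}
macro_rules! register_id_node {
    (impl IdNode for $($ty:ident),+) => {
        $(impl IdNode for $ty {})+
        fn should_alloc_id(kind: Kind) -> bool {
            $($ty::can_cast(kind))||+
        }
    };
}
register_id_node!(impl IdNode for Item, BlockExpr);

fn main() {
    assert!(should_alloc_id(Kind::Item));
    assert!(!should_alloc_id(Kind::Whitespace));
}
```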
@@ -120,15 +155,19 @@ impl AstIdMap {
         res
     }
 
-    pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
+    pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
         let raw = self.erased_ast_id(item.syntax());
         FileAstId { raw, covariant: PhantomData }
     }
 
-    pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
+    pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
         AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
     }
 
+    pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+        self.arena[id].clone()
+    }
+
     fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
         let ptr = SyntaxNodePtr::new(item);
         let hash = hash_ptr(&ptr);

@@ -35,7 +35,7 @@ macro_rules! register_builtin {
 
 impl BuiltinAttrExpander {
     pub fn is_derive(self) -> bool {
-        matches!(self, BuiltinAttrExpander::Derive)
+        matches!(self, BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst)
     }
     pub fn is_test(self) -> bool {
         matches!(self, BuiltinAttrExpander::Test)

@@ -50,6 +50,8 @@ register_builtin! {
     (cfg_accessible, CfgAccessible) => dummy_attr_expand,
     (cfg_eval, CfgEval) => dummy_attr_expand,
     (derive, Derive) => derive_attr_expand,
+    // derive const is equivalent to derive for our proposes.
+    (derive_const, DeriveConst) => derive_attr_expand,
     (global_allocator, GlobalAllocator) => dummy_attr_expand,
     (test, Test) => dummy_attr_expand,
     (test_case, TestCase) => dummy_attr_expand

@@ -12,9 +12,7 @@ use crate::{
     name::{AsName, Name},
     tt::{self, TokenId},
 };
-use syntax::ast::{
-    self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds,
-};
+use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
 
 use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
 

@@ -30,12 +28,13 @@ macro_rules! register_builtin {
             &self,
             db: &dyn ExpandDatabase,
             id: MacroCallId,
-            tt: &tt::Subtree,
+            tt: &ast::Adt,
+            token_map: &TokenMap,
         ) -> ExpandResult<tt::Subtree> {
             let expander = match *self {
                 $( BuiltinDeriveExpander::$trait => $expand, )*
             };
-            expander(db, id, tt)
+            expander(db, id, tt, token_map)
         }
 
         fn find_by_name(name: &name::Name) -> Option<Self> {

@@ -72,12 +71,12 @@ enum VariantShape {
 }
 
 fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
-    (0..n).map(|x| Ident::new(format!("f{x}"), tt::TokenId::unspecified()))
+    (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
 }
 
 impl VariantShape {
     fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
-        self.as_pattern_map(path, |x| quote!(#x))
+        self.as_pattern_map(path, |it| quote!(#it))
     }
 
     fn field_names(&self) -> Vec<tt::Ident> {

@@ -95,17 +94,17 @@ impl VariantShape {
     ) -> tt::Subtree {
         match self {
             VariantShape::Struct(fields) => {
-                let fields = fields.iter().map(|x| {
-                    let mapped = field_map(x);
-                    quote! { #x : #mapped , }
+                let fields = fields.iter().map(|it| {
+                    let mapped = field_map(it);
+                    quote! { #it : #mapped , }
                 });
                 quote! {
                     #path { ##fields }
                 }
             }
             &VariantShape::Tuple(n) => {
-                let fields = tuple_field_iterator(n).map(|x| {
-                    let mapped = field_map(&x);
+                let fields = tuple_field_iterator(n).map(|it| {
+                    let mapped = field_map(&it);
                     quote! {
                         #mapped ,
                     }

@@ -118,16 +117,16 @@ impl VariantShape {
         }
     }
 
-    fn from(value: Option<FieldList>, token_map: &TokenMap) -> Result<Self, ExpandError> {
+    fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
         let r = match value {
             None => VariantShape::Unit,
-            Some(FieldList::RecordFieldList(x)) => VariantShape::Struct(
-                x.fields()
-                    .map(|x| x.name())
-                    .map(|x| name_to_token(token_map, x))
+            Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
+                it.fields()
+                    .map(|it| it.name())
+                    .map(|it| name_to_token(tm, it))
                     .collect::<Result<_, _>>()?,
             ),
-            Some(FieldList::TupleFieldList(x)) => VariantShape::Tuple(x.fields().count()),
+            Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()),
         };
         Ok(r)
     }

@@ -141,7 +140,7 @@ enum AdtShape {
 
 impl AdtShape {
     fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
-        self.as_pattern_map(name, |x| quote!(#x))
+        self.as_pattern_map(name, |it| quote!(#it))
     }
 
     fn field_names(&self) -> Vec<Vec<tt::Ident>> {
@@ -190,32 +189,19 @@ struct BasicAdtInfo {
     associated_types: Vec<tt::Subtree>,
 }
 
-fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
-    let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
-    let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
-        debug!("derive node didn't parse");
-        ExpandError::other("invalid item definition")
-    })?;
-    let item = macro_items.items().next().ok_or_else(|| {
-        debug!("no module item parsed");
-        ExpandError::other("no item found")
-    })?;
-    let adt = ast::Adt::cast(item.syntax().clone()).ok_or_else(|| {
-        debug!("expected adt, found: {:?}", item);
-        ExpandError::other("expected struct, enum or union")
-    })?;
+fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
     let (name, generic_param_list, shape) = match &adt {
         ast::Adt::Struct(it) => (
             it.name(),
             it.generic_param_list(),
-            AdtShape::Struct(VariantShape::from(it.field_list(), &token_map)?),
+            AdtShape::Struct(VariantShape::from(tm, it.field_list())?),
         ),
         ast::Adt::Enum(it) => {
             let default_variant = it
                 .variant_list()
                 .into_iter()
-                .flat_map(|x| x.variants())
-                .position(|x| x.attrs().any(|x| x.simple_name() == Some("default".into())));
+                .flat_map(|it| it.variants())
+                .position(|it| it.attrs().any(|it| it.simple_name() == Some("default".into())));
             (
                 it.name(),
                 it.generic_param_list(),

@@ -224,11 +210,11 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
                     variants: it
                         .variant_list()
                         .into_iter()
-                        .flat_map(|x| x.variants())
-                        .map(|x| {
+                        .flat_map(|it| it.variants())
+                        .map(|it| {
                             Ok((
-                                name_to_token(&token_map, x.name())?,
-                                VariantShape::from(x.field_list(), &token_map)?,
+                                name_to_token(tm, it.name())?,
+                                VariantShape::from(tm, it.field_list())?,
                             ))
                         })
                         .collect::<Result<_, ExpandError>>()?,

@@ -246,16 +232,16 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
             let name = {
                 let this = param.name();
                 match this {
-                    Some(x) => {
-                        param_type_set.insert(x.as_name());
-                        mbe::syntax_node_to_token_tree(x.syntax()).0
+                    Some(it) => {
+                        param_type_set.insert(it.as_name());
+                        mbe::syntax_node_to_token_tree(it.syntax()).0
                     }
                     None => tt::Subtree::empty(),
                 }
             };
             let bounds = match &param {
-                ast::TypeOrConstParam::Type(x) => {
-                    x.type_bound_list().map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+                ast::TypeOrConstParam::Type(it) => {
+                    it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
                 }
                 ast::TypeOrConstParam::Const(_) => None,
             };

@@ -296,9 +282,9 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
             let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
             param_type_set.contains(&name).then_some(p)
         })
-        .map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+        .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
         .collect();
-    let name_token = name_to_token(&token_map, name)?;
+    let name_token = name_to_token(&tm, name)?;
     Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
 }
 

@@ -345,11 +331,12 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
 /// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
 /// therefore does not get bound by the derived trait.
 fn expand_simple_derive(
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
     trait_path: tt::Subtree,
     make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
 ) -> ExpandResult<tt::Subtree> {
-    let info = match parse_adt(tt) {
+    let info = match parse_adt(tm, tt) {
         Ok(info) => info,
         Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
     };

@@ -373,10 +360,10 @@ fn expand_simple_derive(
         })
         .unzip();
 
-    where_block.extend(info.associated_types.iter().map(|x| {
-        let x = x.clone();
+    where_block.extend(info.associated_types.iter().map(|it| {
+        let it = it.clone();
         let bound = trait_path.clone();
-        quote! { #x : #bound , }
+        quote! { #it : #bound , }
     }));
 
     let name = info.name;

@@ -405,19 +392,21 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
 fn copy_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::marker::Copy }, |_| quote! {})
+    expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
 }
 
 fn clone_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::clone::Clone }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
         if matches!(adt.shape, AdtShape::Union) {
             let star = tt::Punct {
                 char: '*',

@@ -444,7 +433,7 @@ fn clone_expand(
         }
         let name = &adt.name;
         let patterns = adt.shape.as_pattern(name);
-        let exprs = adt.shape.as_pattern_map(name, |x| quote! { #x .clone() });
+        let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
         let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
             let fat_arrow = fat_arrow();
             quote! {

@@ -479,10 +468,11 @@ fn and_and() -> ::tt::Subtree<TokenId> {
 fn default_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::default::Default }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
         let body = match &adt.shape {
             AdtShape::Struct(fields) => {
                 let name = &adt.name;

@@ -518,16 +508,17 @@ fn default_expand(
 fn debug_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::fmt::Debug }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
         let for_variant = |name: String, v: &VariantShape| match v {
             VariantShape::Struct(fields) => {
-                let for_fields = fields.iter().map(|x| {
-                    let x_string = x.to_string();
+                let for_fields = fields.iter().map(|it| {
+                    let x_string = it.to_string();
                     quote! {
-                        .field(#x_string, & #x)
+                        .field(#x_string, & #it)
                     }
                 });
                 quote! {

@@ -535,9 +526,9 @@ fn debug_expand(
                 }
             }
             VariantShape::Tuple(n) => {
-                let for_fields = tuple_field_iterator(*n).map(|x| {
+                let for_fields = tuple_field_iterator(*n).map(|it| {
                     quote! {
-                        .field( & #x)
+                        .field( & #it)
                     }
                 });
                 quote! {

@@ -598,10 +589,11 @@ fn debug_expand(
 fn hash_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::hash::Hash }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
         if matches!(adt.shape, AdtShape::Union) {
             // FIXME: Return expand error here
             return quote! {};

@@ -621,7 +613,7 @@ fn hash_expand(
         let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
             |(pat, names)| {
                 let expr = {
-                    let it = names.iter().map(|x| quote! { #x . hash(ra_expand_state); });
+                    let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
                     quote! { {
                         ##it
                     } }
@@ -632,9 +624,14 @@ fn hash_expand(
                 }
             },
         );
+        let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
+            quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
+        } else {
+            quote! {}
+        };
         quote! {
             fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
-                #krate::mem::discriminant(self).hash(ra_expand_state);
+                #check_discriminant
                 match self {
                     ##arms
                 }
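With the `check_discriminant` guard above, the generated `Hash` impl hashes `mem::discriminant` only for enums; structs no longer pay for a meaningless discriminant call. Hand-written approximations of the two resulting expansions (not the literal macro output):

```rust
use std::hash::{Hash, Hasher};
use std::mem;

enum Shape {
    Circle(u32),
    Square(u32),
}

// Roughly what the builtin derive now emits for an enum: the
// discriminant is hashed first, then the fields of the active variant.
impl Hash for Shape {
    fn hash<H: Hasher>(&self, state: &mut H) {
        mem::discriminant(self).hash(state);
        match self {
            Shape::Circle(r) => r.hash(state),
            Shape::Square(s) => s.hash(state),
        }
    }
}

struct Point {
    x: u32,
    y: u32,
}

// For a struct there is no discriminant call anymore; only the fields.
impl Hash for Point {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.x.hash(state);
        self.y.hash(state);
    }
}

fn main() {
    let mut h = std::collections::hash_map::DefaultHasher::new();
    Shape::Circle(3).hash(&mut h);
    Point { x: 1, y: 2 }.hash(&mut h);
    println!("{}", h.finish());
}
```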
@@ -646,19 +643,21 @@ fn hash_expand(
 fn eq_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::cmp::Eq }, |_| quote! {})
+    expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
 }
 
 fn partial_eq_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::cmp::PartialEq }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
         if matches!(adt.shape, AdtShape::Union) {
             // FIXME: Return expand error here
             return quote! {};

@@ -674,9 +673,9 @@ fn partial_eq_expand(
                     quote!(true)
                 }
                 [first, rest @ ..] => {
-                    let rest = rest.iter().map(|x| {
-                        let t1 = Ident::new(format!("{}_self", x.text), x.span);
-                        let t2 = Ident::new(format!("{}_other", x.text), x.span);
+                    let rest = rest.iter().map(|it| {
+                        let t1 = Ident::new(format!("{}_self", it.text), it.span);
+                        let t2 = Ident::new(format!("{}_other", it.text), it.span);
                         let and_and = and_and();
                         quote!(#and_and #t1 .eq( #t2 ))
                     });

@@ -708,12 +707,12 @@ fn self_and_other_patterns(
     adt: &BasicAdtInfo,
     name: &tt::Ident,
 ) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
-    let self_patterns = adt.shape.as_pattern_map(name, |x| {
-        let t = Ident::new(format!("{}_self", x.text), x.span);
+    let self_patterns = adt.shape.as_pattern_map(name, |it| {
+        let t = Ident::new(format!("{}_self", it.text), it.span);
         quote!(#t)
     });
-    let other_patterns = adt.shape.as_pattern_map(name, |x| {
-        let t = Ident::new(format!("{}_other", x.text), x.span);
+    let other_patterns = adt.shape.as_pattern_map(name, |it| {
+        let t = Ident::new(format!("{}_other", it.text), it.span);
         quote!(#t)
     });
     (self_patterns, other_patterns)

@@ -722,10 +721,11 @@ fn self_and_other_patterns(
 fn ord_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::cmp::Ord }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
         fn compare(
             krate: &tt::TokenTree,
             left: tt::Subtree,

@@ -747,9 +747,6 @@ fn ord_expand(
             // FIXME: Return expand error here
             return quote!();
         }
-        let left = quote!(#krate::intrinsics::discriminant_value(self));
-        let right = quote!(#krate::intrinsics::discriminant_value(other));
-
         let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
         let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
             |(pat1, pat2, fields)| {

@@ -764,17 +761,17 @@ fn ord_expand(
             },
         );
         let fat_arrow = fat_arrow();
-        let body = compare(
-            krate,
-            left,
-            right,
-            quote! {
-                match (self, other) {
-                    ##arms
-                    _unused #fat_arrow #krate::cmp::Ordering::Equal
-                }
-            },
-        );
+        let mut body = quote! {
+            match (self, other) {
+                ##arms
+                _unused #fat_arrow #krate::cmp::Ordering::Equal
+            }
+        };
+        if matches!(&adt.shape, AdtShape::Enum { .. }) {
+            let left = quote!(#krate::intrinsics::discriminant_value(self));
+            let right = quote!(#krate::intrinsics::discriminant_value(other));
+            body = compare(krate, left, right, body);
+        }
         quote! {
             fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
                 #body

@@ -786,10 +783,11 @@ fn ord_expand(
 fn partial_ord_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &ast::Adt,
+    tm: &TokenMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
-    expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd }, |adt| {
+    expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
         fn compare(
             krate: &tt::TokenTree,
             left: tt::Subtree,

@@ -339,7 +339,7 @@ fn format_args_expand_general(
                     parts.push(mem::take(&mut last_part));
                     let arg_tree = if argument.is_empty() {
                         match args.next() {
-                            Some(x) => x,
+                            Some(it) => it,
                             None => {
                                 err = Some(mbe::ExpandError::NoMatchingRule.into());
                                 tt::Subtree::empty()

@@ -361,7 +361,7 @@ fn format_args_expand_general(
                             quote!(::core::fmt::Display::fmt)
                         }
                     };
-                    arg_tts.push(quote! { ::core::fmt::Argument::new(&(#arg_tree), #formatter), });
+                    arg_tts.push(quote! { ::core::fmt::ArgumentV1::new(&(#arg_tree), #formatter), });
                 }
                 '}' => {
                     if format_iter.peek() == Some(&'}') {

@@ -378,11 +378,11 @@ fn format_args_expand_general(
     if !last_part.is_empty() {
         parts.push(last_part);
     }
-    let part_tts = parts.into_iter().map(|x| {
+    let part_tts = parts.into_iter().map(|it| {
         let text = if let Some(raw) = &raw_sharps {
-            format!("r{raw}\"{}\"{raw}", x).into()
+            format!("r{raw}\"{}\"{raw}", it).into()
         } else {
-            format!("\"{}\"", x).into()
+            format!("\"{}\"", it).into()
         };
         let l = tt::Literal { span: tt::TokenId::unspecified(), text };
         quote!(#l ,)

@@ -574,7 +574,7 @@ fn concat_bytes_expand(
             syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()),
             syntax::SyntaxKind::BYTE_STRING => {
                 let components = unquote_byte_string(lit).unwrap_or_default();
-                components.into_iter().for_each(|x| bytes.push(x.to_string()));
+                components.into_iter().for_each(|it| bytes.push(it.to_string()));
             }
             _ => {
                 err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());

@@ -692,7 +692,7 @@ pub(crate) fn include_arg_to_tt(
     arg_id: MacroCallId,
 ) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
     let loc = db.lookup_intern_macro_call(arg_id);
-    let Some(EagerCallInfo { arg, arg_id: Some(arg_id), .. }) = loc.eager.as_deref() else {
+    let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
         panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
     };
     let path = parse_string(&arg.0)?;

@@ -1,9 +1,9 @@
 //! Defines database & queries for macro expansion.
 
-use base_db::{salsa, Edition, SourceDatabase};
+use base_db::{salsa, CrateId, Edition, SourceDatabase};
 use either::Either;
 use limit::Limit;
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
 use syntax::{
     ast::{self, HasAttrs, HasDocComments},

@@ -13,7 +13,7 @@ use triomphe::Arc;
 
 use crate::{
     ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
-    builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, BuiltinAttrExpander,
+    builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
     BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
     ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
     MacroDefKind, MacroFile, ProcMacroExpander,

@@ -28,61 +28,67 @@ use crate::{
 static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
 
 #[derive(Debug, Clone, Eq, PartialEq)]
-pub enum TokenExpander {
 /// Old-style `macro_rules` or the new macros 2.0
-    DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap },
+pub struct DeclarativeMacroExpander {
+    pub mac: mbe::DeclarativeMacro,
+    pub def_site_token_map: mbe::TokenMap,
+}
+
+impl DeclarativeMacroExpander {
+    pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+        match self.mac.err() {
+            Some(e) => ExpandResult::new(
+                tt::Subtree::empty(),
+                ExpandError::other(format!("invalid macro definition: {e}")),
+            ),
+            None => self.mac.expand(tt).map_err(Into::into),
+        }
+    }
+
+    pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
+        self.mac.map_id_down(token_id)
+    }
+
+    pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
+        self.mac.map_id_up(token_id)
+    }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum TokenExpander {
+    DeclarativeMacro(Arc<DeclarativeMacroExpander>),
     /// Stuff like `line!` and `file!`.
-    Builtin(BuiltinFnLikeExpander),
+    BuiltIn(BuiltinFnLikeExpander),
     /// Built-in eagerly expanded fn-like macros (`include!`, `concat!`, etc.)
-    BuiltinEager(EagerExpander),
+    BuiltInEager(EagerExpander),
     /// `global_allocator` and such.
-    BuiltinAttr(BuiltinAttrExpander),
+    BuiltInAttr(BuiltinAttrExpander),
     /// `derive(Copy)` and such.
-    BuiltinDerive(BuiltinDeriveExpander),
+    BuiltInDerive(BuiltinDeriveExpander),
     /// The thing we love the most here in rust-analyzer -- procedural macros.
     ProcMacro(ProcMacroExpander),
 }
 
+// FIXME: Get rid of these methods
 impl TokenExpander {
-    fn expand(
-        &self,
-        db: &dyn ExpandDatabase,
-        id: MacroCallId,
-        tt: &tt::Subtree,
-    ) -> ExpandResult<tt::Subtree> {
-        match self {
-            TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
-            TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
-            TokenExpander::BuiltinEager(it) => it.expand(db, id, tt).map_err(Into::into),
-            TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
-            TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
-            TokenExpander::ProcMacro(_) => {
-                // We store the result in salsa db to prevent non-deterministic behavior in
-                // some proc-macro implementation
-                // See #4315 for details
-                db.expand_proc_macro(id)
-            }
-        }
-    }
-
     pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
         match self {
-            TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id),
-            TokenExpander::Builtin(..)
-            | TokenExpander::BuiltinEager(..)
-            | TokenExpander::BuiltinAttr(..)
-            | TokenExpander::BuiltinDerive(..)
+            TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
+            TokenExpander::BuiltIn(..)
+            | TokenExpander::BuiltInEager(..)
+            | TokenExpander::BuiltInAttr(..)
+            | TokenExpander::BuiltInDerive(..)
            | TokenExpander::ProcMacro(..) => id,
         }
     }
 
     pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
         match self {
-            TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id),
-            TokenExpander::Builtin(..)
-            | TokenExpander::BuiltinEager(..)
-            | TokenExpander::BuiltinAttr(..)
-            | TokenExpander::BuiltinDerive(..)
+            TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
+            TokenExpander::BuiltIn(..)
+            | TokenExpander::BuiltInEager(..)
+            | TokenExpander::BuiltInAttr(..)
+            | TokenExpander::BuiltInDerive(..)
             | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
         }
     }
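The refactor above extracts the compiled `macro_rules` state into an `Arc`-shared `DeclarativeMacroExpander` and reports broken definitions when the macro is *expanded* rather than when the expander is built, which keeps the expander-building query infallible and cacheable. A simplified sketch of that design, with toy types standing in for the real `mbe` API:

```rust
use std::sync::Arc;

// Toy stand-ins for mbe::DeclarativeMacro and its error type.
struct CompiledMacro {
    err: Option<String>, // a definition error recorded at compile time
}

impl CompiledMacro {
    fn expand(&self, input: &str) -> Result<String, String> {
        Ok(format!("expanded({input})"))
    }
}

struct DeclMacroExpander {
    mac: CompiledMacro,
}

impl DeclMacroExpander {
    // Instead of failing when the expander is *built*, a broken definition
    // is stored and surfaced on every expansion attempt, so the query that
    // builds the expander always succeeds and can sit behind an Arc.
    fn expand(&self, input: &str) -> Result<String, String> {
        match &self.mac.err {
            Some(e) => Err(format!("invalid macro definition: {e}")),
            None => self.mac.expand(input),
        }
    }
}

fn main() {
    let ok = Arc::new(DeclMacroExpander { mac: CompiledMacro { err: None } });
    assert_eq!(ok.expand("x").unwrap(), "expanded(x)");

    let broken = DeclMacroExpander { mac: CompiledMacro { err: Some("bad rule".into()) } };
    assert!(broken.expand("x").is_err());
}
```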
@@ -118,14 +124,26 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_arg(
         &self,
         id: MacroCallId,
-    ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
+    ) -> ValueResult<
+        Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+        Arc<Box<[SyntaxError]>>,
+    >;
     /// Extracts syntax node, corresponding to a macro call. That's a firewall
     /// query, only typing in the macro call itself changes the returned
     /// subtree.
-    fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
-    /// Gets the expander for this macro. This compiles declarative macros, and
-    /// just fetches procedural ones.
-    fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;
+    fn macro_arg_node(
+        &self,
+        id: MacroCallId,
+    ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+    /// Fetches the expander for this macro.
+    #[salsa::transparent]
+    fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
+    /// Fetches (and compiles) the expander of this decl macro.
+    fn decl_macro_expander(
+        &self,
+        def_crate: CrateId,
+        id: AstId<ast::Macro>,
+    ) -> Arc<DeclarativeMacroExpander>;
 
     /// Expand macro call to a token tree.
     // This query is LRU cached

@@ -141,8 +159,8 @@ pub trait ExpandDatabase: SourceDatabase {
     /// Special case of the previous query for procedural macros. We can't LRU
     /// proc macros, since they are not deterministic in general, and
     /// non-determinism breaks salsa in a very, very, very bad way.
-    /// @edwin0cheng heroically debugged this once!
-    fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
+    /// @edwin0cheng heroically debugged this once! See #4315 for details
+    fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
     /// Firewall query that returns the errors from the `parse_macro_expansion` query.
     fn parse_macro_expansion_error(
         &self,
@@ -163,7 +181,6 @@ pub fn expand_speculative(
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
     let loc = db.lookup_intern_macro_call(actual_macro_call);
-    let macro_def = db.macro_def(loc.def).ok()?;
     let token_range = token_to_map.text_range();
 
     // Build the subtree and token mapping for the speculative args

@@ -221,7 +238,12 @@ pub fn expand_speculative(
         None => {
             let range = token_range.checked_sub(speculative_args.text_range().start())?;
             let token_id = spec_args_tmap.token_by_range(range)?;
-            macro_def.map_id_down(token_id)
+            match loc.def.kind {
+                MacroDefKind::Declarative(it) => {
+                    db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
+                }
+                _ => token_id,
+            }
         }
     };
 

@@ -235,7 +257,17 @@ pub fn expand_speculative(
         MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
             pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
         }
-        _ => macro_def.expand(db, actual_macro_call, &tt),
+        MacroDefKind::BuiltInDerive(expander, ..) => {
+            // this cast is a bit sus, can we avoid losing the typedness here?
+            let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
+            expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
+        }
+        MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
+        MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
+        MacroDefKind::BuiltInEager(it, _) => {
+            it.expand(db, actual_macro_call, &tt).map_err(Into::into)
+        }
+        MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
     };
 
     let expand_to = macro_expand_to(db, actual_macro_call);
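With the catch-all `TokenExpander::expand` helper gone, `expand_speculative` matches on `MacroDefKind` and calls each expander directly, which lets the derive arm keep a typed `ast::Adt` instead of a flattened token tree. A toy model of why the per-kind dispatch helps (all names here are invented for illustration):

```rust
// Toy stand-ins: the derive expander wants a typed AST node, the
// declarative one wants raw text; matching on the kind keeps each
// call typed instead of funnelling everything through one signature.
struct Adt(String);

enum MacroKind {
    Declarative(fn(&str) -> String),
    BuiltInDerive(fn(&Adt) -> String),
}

fn expand_speculative(kind: &MacroKind, raw: &str) -> String {
    match kind {
        MacroKind::Declarative(f) => f(raw),
        MacroKind::BuiltInDerive(f) => {
            // mirrors the `ast::Adt::cast(...)` above: recover the typed node
            let adt = Adt(raw.to_string());
            f(&adt)
        }
    }
}

fn main() {
    let decl = MacroKind::Declarative(|s| format!("decl({s})"));
    let derive = MacroKind::BuiltInDerive(|a| format!("derive({})", a.0));
    assert_eq!(expand_speculative(&decl, "x"), "decl(x)");
    assert_eq!(expand_speculative(&derive, "struct S;"), "derive(struct S;)");
}
```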
@@ -297,17 +329,31 @@ fn parse_macro_expansion(
     ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
 }
 
+fn parse_macro_expansion_error(
+    db: &dyn ExpandDatabase,
+    macro_call_id: MacroCallId,
+) -> ExpandResult<Box<[SyntaxError]>> {
+    db.parse_macro_expansion(MacroFile { macro_call_id })
+        .map(|it| it.0.errors().to_vec().into_boxed_slice())
+}
+
 fn macro_arg(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
+) -> ValueResult<
+    Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+    Arc<Box<[SyntaxError]>>,
+> {
     let loc = db.lookup_intern_macro_call(id);
 
-    if let Some(EagerCallInfo { arg, arg_id: Some(_), error: _ }) = loc.eager.as_deref() {
-        return Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())));
+    if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
+        return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
     }
 
-    let arg = db.macro_arg_text(id)?;
+    let ValueResult { value, err } = db.macro_arg_node(id);
+    let Some(arg) = value else {
+        return ValueResult { value: None, err };
+    };
+
     let node = SyntaxNode::new_root(arg);
     let censor = censor_for_macro_input(&loc, &node);
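`macro_arg` and `macro_arg_node` now return `mbe::ValueResult`, which carries a best-effort value *and* an error side-channel at once, so callers still get a usable subtree for erroneous input. A minimal sketch of such a type, assuming the real one has roughly the constructors used above (`ok`, `new`, `only_err`):

```rust
// Minimal sketch of a "value plus errors" result type; the real
// mbe::ValueResult is assumed to have roughly these constructors.
struct ValueResult<V, E> {
    value: V,
    err: Option<E>,
}

impl<V, E> ValueResult<V, E> {
    fn ok(value: V) -> Self {
        ValueResult { value, err: None }
    }
    fn new(value: V, err: E) -> Self {
        ValueResult { value, err: Some(err) }
    }
    fn only_err(err: E) -> Self
    where
        V: Default,
    {
        ValueResult { value: V::default(), err: Some(err) }
    }
}

fn main() {
    // A partially-broken macro argument still yields a value to work with.
    let res: ValueResult<Option<&str>, String> =
        ValueResult::new(Some("recovered tree"), "unbalanced token tree".to_owned());
    assert!(res.value.is_some() && res.err.is_some());
}
```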
@@ -325,9 +371,16 @@ fn macro_arg(
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
         tt.delimiter = tt::Delimiter::unspecified();
     }
-    Some(Arc::new((tt, tmap, fixups.undo_info)))
+    let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
+    match err {
+        Some(err) => ValueResult::new(val, err),
+        None => ValueResult::ok(val),
+    }
 }
 
+/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
+/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
+/// - attributes expect the invoking attribute to be stripped
 fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
     // FIXME: handle `cfg_attr`
     (|| {
@@ -364,9 +417,44 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
         .unwrap_or_default()
 }
 
-fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> {
+fn macro_arg_node(
+    db: &dyn ExpandDatabase,
+    id: MacroCallId,
+) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
+    let err = || -> Arc<Box<[_]>> {
+        Arc::new(Box::new([SyntaxError::new_at_offset(
+            "invalid macro call".to_owned(),
+            syntax::TextSize::from(0),
+        )]))
+    };
     let loc = db.lookup_intern_macro_call(id);
-    let arg = loc.kind.arg(db)?;
+    let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
+        let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
+            Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr).0)
+        } else {
+            loc.kind
+                .arg(db)
+                .and_then(|arg| ast::TokenTree::cast(arg.value))
+                .map(|tt| tt.reparse_as_expr().to_syntax())
+        };
+
+        match res {
+            Some(res) if res.errors().is_empty() => res.syntax_node(),
+            Some(res) => {
+                return ValueResult::new(
+                    Some(res.syntax_node().green().into()),
+                    // Box::<[_]>::from(res.errors()), not stable yet
+                    Arc::new(res.errors().to_vec().into_boxed_slice()),
+                );
+            }
+            None => return ValueResult::only_err(err()),
+        }
+    } else {
+        match loc.kind.arg(db) {
+            Some(res) => res.value,
+            None => return ValueResult::only_err(err()),
+        }
+    };
     if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
         let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
         let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
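The rewritten `macro_arg_node` gives eager builtins special treatment: their argument is (re)parsed as an *expression*, either from the stored eager token tree or from the raw call-site token tree, and any syntax errors ride along with the recovered tree instead of discarding it. A rough, toy-level model of that control flow:

```rust
// Toy model of the eager-argument handling above: parse as an expression,
// keep the tree even when there are errors, and report them alongside it.
struct Parsed {
    tree: String,
    errors: Vec<String>,
}

fn parse_as_expr(src: &str) -> Option<Parsed> {
    if src.is_empty() {
        return None; // no argument at all
    }
    let errors = if src.contains('(') && !src.contains(')') {
        vec!["unbalanced token tree".to_owned()]
    } else {
        vec![]
    };
    Some(Parsed { tree: format!("expr({src})"), errors })
}

/// Returns (best-effort tree, errors) rather than failing outright.
fn eager_macro_arg(src: &str) -> (Option<String>, Vec<String>) {
    match parse_as_expr(src) {
        Some(p) if p.errors.is_empty() => (Some(p.tree), vec![]),
        Some(p) => (Some(p.tree), p.errors),
        None => (None, vec!["invalid macro call".to_owned()]),
    }
}

fn main() {
    assert_eq!(eager_macro_arg("1 + 1").0.as_deref(), Some("expr(1 + 1)"));
    let (tree, errs) = eager_macro_arg("foo(");
    assert!(tree.is_some() && !errs.is_empty());
}
```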
@@ -381,101 +469,146 @@ fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode>
             // Some day, we'll have explicit recursion counters for all
             // recursive things, at which point this code might be removed.
             cov_mark::hit!(issue9358_bad_macro_stack_overflow);
-            return None;
+            return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
+                "unbalanced token tree".to_owned(),
+                arg.text_range(),
+            )])));
         }
     }
-    if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
-        Some(
-            mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr)
-                .0
-                .syntax_node()
-                .green()
-                .into(),
-        )
-    } else {
-        Some(arg.green().into())
-    }
+    ValueResult::ok(Some(arg.green().into()))
 }
 
-fn macro_def(
+fn decl_macro_expander(
     db: &dyn ExpandDatabase,
-    id: MacroDefId,
-) -> Result<Arc<TokenExpander>, mbe::ParseError> {
+    def_crate: CrateId,
+    id: AstId<ast::Macro>,
+) -> Arc<DeclarativeMacroExpander> {
+    let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
+    let (mac, def_site_token_map) = match id.to_node(db) {
+        ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
+            Some(arg) => {
+                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
+                (mac, def_site_token_map)
+            }
+            None => (
+                mbe::DeclarativeMacro::from_err(
+                    mbe::ParseError::Expected("expected a token tree".into()),
+                    is_2021,
+                ),
+                Default::default(),
+            ),
+        },
+        ast::Macro::MacroDef(macro_def) => match macro_def.body() {
+            Some(arg) => {
+                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
+                (mac, def_site_token_map)
+            }
+            None => (
+                mbe::DeclarativeMacro::from_err(
+                    mbe::ParseError::Expected("expected a token tree".into()),
+                    is_2021,
+                ),
+                Default::default(),
+            ),
+        },
+    };
+    Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
+}
+
+fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
     match id.kind {
         MacroDefKind::Declarative(ast_id) => {
-            let is_2021 = db.crate_graph()[id.krate].edition >= Edition::Edition2021;
-            let (mac, def_site_token_map) = match ast_id.to_node(db) {
-                ast::Macro::MacroRules(macro_rules) => {
-                    let arg = macro_rules
-                        .token_tree()
-                        .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
-                    let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
-                    let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021)?;
-                    (mac, def_site_token_map)
-                }
-                ast::Macro::MacroDef(macro_def) => {
-                    let arg = macro_def
-                        .body()
-                        .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
-                    let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
-                    let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021)?;
-                    (mac, def_site_token_map)
-                }
-            };
-            Ok(Arc::new(TokenExpander::DeclarativeMacro { mac, def_site_token_map }))
+            TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
         }
-        MacroDefKind::BuiltIn(expander, _) => Ok(Arc::new(TokenExpander::Builtin(expander))),
-        MacroDefKind::BuiltInAttr(expander, _) => {
-            Ok(Arc::new(TokenExpander::BuiltinAttr(expander)))
-        }
-        MacroDefKind::BuiltInDerive(expander, _) => {
-            Ok(Arc::new(TokenExpander::BuiltinDerive(expander)))
-        }
+        MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
+        MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
+        MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
+        MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
+        MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
||||||
MacroDefKind::BuiltInEager(expander, ..) => {
|
|
||||||
Ok(Arc::new(TokenExpander::BuiltinEager(expander)))
|
|
||||||
}
|
|
||||||
MacroDefKind::ProcMacro(expander, ..) => Ok(Arc::new(TokenExpander::ProcMacro(expander))),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
|
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
|
||||||
let _p = profile::span("macro_expand");
|
let _p = profile::span("macro_expand");
|
||||||
let loc = db.lookup_intern_macro_call(id);
|
let loc = db.lookup_intern_macro_call(id);
|
||||||
if let Some(EagerCallInfo { arg, arg_id: None, error }) = loc.eager.as_deref() {
|
|
||||||
// This is an input expansion for an eager macro. These are already pre-expanded
|
let ExpandResult { value: tt, mut err } = match loc.def.kind {
|
||||||
return ExpandResult { value: Arc::new(arg.0.clone()), err: error.clone() };
|
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
|
||||||
|
MacroDefKind::BuiltInDerive(expander, ..) => {
|
||||||
|
let arg = db.macro_arg_node(id).value.unwrap();
|
||||||
|
|
||||||
|
let node = SyntaxNode::new_root(arg);
|
||||||
|
let censor = censor_for_macro_input(&loc, &node);
|
||||||
|
let mut fixups = fixup::fixup_syntax(&node);
|
||||||
|
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
|
||||||
|
let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
|
||||||
|
&node,
|
||||||
|
fixups.token_map,
|
||||||
|
fixups.next_id,
|
||||||
|
fixups.replace,
|
||||||
|
fixups.append,
|
||||||
|
);
|
||||||
|
|
||||||
|
// this cast is a bit sus, can we avoid losing the typedness here?
|
||||||
|
let adt = ast::Adt::cast(node).unwrap();
|
||||||
|
let mut res = expander.expand(db, id, &adt, &tmap);
|
||||||
|
fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
|
||||||
|
res
|
||||||
}
|
}
|
||||||
let expander = match db.macro_def(loc.def) {
|
_ => {
|
||||||
Ok(it) => it,
|
let ValueResult { value, err } = db.macro_arg(id);
|
||||||
// FIXME: We should make sure to enforce a variant that invalid macro
|
let Some(macro_arg) = value else {
|
||||||
// definitions do not get expanders that could reach this call path!
|
|
||||||
Err(err) => {
|
|
||||||
return ExpandResult {
|
return ExpandResult {
|
||||||
value: Arc::new(tt::Subtree {
|
value: Arc::new(tt::Subtree {
|
||||||
delimiter: tt::Delimiter::UNSPECIFIED,
|
|
||||||
token_trees: vec![],
|
|
||||||
}),
|
|
||||||
err: Some(ExpandError::other(format!("invalid macro definition: {err}"))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let Some(macro_arg) = db.macro_arg(id) else {
|
|
||||||
return ExpandResult {
|
|
||||||
value: Arc::new(
|
|
||||||
tt::Subtree {
|
|
||||||
delimiter: tt::Delimiter::UNSPECIFIED,
|
delimiter: tt::Delimiter::UNSPECIFIED,
|
||||||
token_trees: Vec::new(),
|
token_trees: Vec::new(),
|
||||||
},
|
}),
|
||||||
),
|
// FIXME: We should make sure to enforce an invariant that invalid macro
|
||||||
// FIXME: We should make sure to enforce a variant that invalid macro
|
|
||||||
// calls do not reach this call path!
|
// calls do not reach this call path!
|
||||||
err: Some(ExpandError::other(
|
err: Some(ExpandError::other("invalid token tree")),
|
||||||
"invalid token tree"
|
|
||||||
)),
|
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
let (arg_tt, arg_tm, undo_info) = &*macro_arg;
|
|
||||||
let ExpandResult { value: mut tt, mut err } = expander.expand(db, id, arg_tt);
|
let (arg, arg_tm, undo_info) = &*macro_arg;
|
||||||
|
let mut res = match loc.def.kind {
|
||||||
|
MacroDefKind::Declarative(id) => {
|
||||||
|
db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
|
||||||
|
}
|
||||||
|
MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
|
||||||
|
// This might look a bit odd, but we do not expand the inputs to eager macros here.
|
||||||
|
// Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
|
||||||
|
// That kind of expansion uses the ast id map of an eager macros input though which goes through
|
||||||
|
// the HirFileId machinery. As eager macro inputs are assigned a macro file id that query
|
||||||
|
// will end up going through here again, whereas we want to just want to inspect the raw input.
|
||||||
|
// As such we just return the input subtree here.
|
||||||
|
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
|
||||||
|
let mut arg = arg.clone();
|
||||||
|
fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
|
||||||
|
|
||||||
|
return ExpandResult {
|
||||||
|
value: Arc::new(arg),
|
||||||
|
err: err.map(|err| {
|
||||||
|
let mut buf = String::new();
|
||||||
|
for err in &**err {
|
||||||
|
use std::fmt::Write;
|
||||||
|
_ = write!(buf, "{}, ", err);
|
||||||
|
}
|
||||||
|
buf.pop();
|
||||||
|
buf.pop();
|
||||||
|
ExpandError::other(buf)
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
|
||||||
|
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
|
||||||
|
_ => unreachable!(),
|
||||||
|
};
|
||||||
|
fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
|
||||||
|
res
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
|
if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
|
||||||
// FIXME: We should report both errors!
|
// FIXME: We should report both errors!
|
||||||
|
@ -483,48 +616,29 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set a hard limit for the expanded tt
|
// Set a hard limit for the expanded tt
|
||||||
let count = tt.count();
|
if let Err(value) = check_tt_count(&tt) {
|
||||||
if TOKEN_LIMIT.check(count).is_err() {
|
return value;
|
||||||
return ExpandResult {
|
|
||||||
value: Arc::new(tt::Subtree {
|
|
||||||
delimiter: tt::Delimiter::UNSPECIFIED,
|
|
||||||
token_trees: vec![],
|
|
||||||
}),
|
|
||||||
err: Some(ExpandError::other(format!(
|
|
||||||
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
|
|
||||||
count,
|
|
||||||
TOKEN_LIMIT.inner(),
|
|
||||||
))),
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
|
|
||||||
|
|
||||||
ExpandResult { value: Arc::new(tt), err }
|
ExpandResult { value: Arc::new(tt), err }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_macro_expansion_error(
|
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
|
||||||
db: &dyn ExpandDatabase,
|
|
||||||
macro_call_id: MacroCallId,
|
|
||||||
) -> ExpandResult<Box<[SyntaxError]>> {
|
|
||||||
db.parse_macro_expansion(MacroFile { macro_call_id })
|
|
||||||
.map(|it| it.0.errors().to_vec().into_boxed_slice())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
|
|
||||||
let loc = db.lookup_intern_macro_call(id);
|
let loc = db.lookup_intern_macro_call(id);
|
||||||
let Some(macro_arg) = db.macro_arg(id) else {
|
let Some(macro_arg) = db.macro_arg(id).value else {
|
||||||
return ExpandResult {
|
return ExpandResult {
|
||||||
value: tt::Subtree {
|
value: Arc::new(tt::Subtree {
|
||||||
delimiter: tt::Delimiter::UNSPECIFIED,
|
delimiter: tt::Delimiter::UNSPECIFIED,
|
||||||
token_trees: Vec::new(),
|
token_trees: Vec::new(),
|
||||||
},
|
}),
|
||||||
err: Some(ExpandError::other(
|
// FIXME: We should make sure to enforce an invariant that invalid macro
|
||||||
"invalid token tree"
|
// calls do not reach this call path!
|
||||||
)),
|
err: Some(ExpandError::other("invalid token tree")),
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let (arg_tt, arg_tm, undo_info) = &*macro_arg;
|
||||||
|
|
||||||
let expander = match loc.def.kind {
|
let expander = match loc.def.kind {
|
||||||
MacroDefKind::ProcMacro(expander, ..) => expander,
|
MacroDefKind::ProcMacro(expander, ..) => expander,
|
||||||
_ => unreachable!(),
|
_ => unreachable!(),
|
||||||
|
@ -533,13 +647,23 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<t
|
||||||
let attr_arg = match &loc.kind {
|
let attr_arg = match &loc.kind {
|
||||||
MacroCallKind::Attr { attr_args, .. } => {
|
MacroCallKind::Attr { attr_args, .. } => {
|
||||||
let mut attr_args = attr_args.0.clone();
|
let mut attr_args = attr_args.0.clone();
|
||||||
mbe::Shift::new(¯o_arg.0).shift_all(&mut attr_args);
|
mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
|
||||||
Some(attr_args)
|
Some(attr_args)
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
};
|
};
|
||||||
|
|
||||||
expander.expand(db, loc.def.krate, loc.krate, ¯o_arg.0, attr_arg.as_ref())
|
let ExpandResult { value: mut tt, err } =
|
||||||
|
expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
|
||||||
|
|
||||||
|
// Set a hard limit for the expanded tt
|
||||||
|
if let Err(value) = check_tt_count(&tt) {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
|
||||||
|
|
||||||
|
ExpandResult { value: Arc::new(tt), err }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
|
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
|
||||||
|
@ -563,3 +687,22 @@ fn token_tree_to_syntax_node(
|
||||||
};
|
};
|
||||||
mbe::token_tree_to_syntax_node(tt, entry_point)
|
mbe::token_tree_to_syntax_node(tt, entry_point)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
|
||||||
|
let count = tt.count();
|
||||||
|
if TOKEN_LIMIT.check(count).is_err() {
|
||||||
|
Err(ExpandResult {
|
||||||
|
value: Arc::new(tt::Subtree {
|
||||||
|
delimiter: tt::Delimiter::UNSPECIFIED,
|
||||||
|
token_trees: vec![],
|
||||||
|
}),
|
||||||
|
err: Some(ExpandError::other(format!(
|
||||||
|
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
|
||||||
|
count,
|
||||||
|
TOKEN_LIMIT.inner(),
|
||||||
|
))),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
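The new `macro_arg`/`macro_arg_node` queries return a value together with the syntax errors collected while producing it, instead of dropping the value entirely on error. A minimal self-contained sketch of that value-plus-errors shape (the `ValueResult` below is a simplified stand-in for illustration, not the actual base-db definition):

use std::sync::Arc;

/// Simplified stand-in for the `ValueResult` used by the queries above:
/// a best-effort value carried together with any accumulated errors.
struct ValueResult<V, E> {
    value: V,
    err: Option<E>,
}

impl<V: Default, E> ValueResult<V, E> {
    fn ok(value: V) -> Self {
        Self { value, err: None }
    }
    fn only_err(err: E) -> Self {
        Self { value: V::default(), err: Some(err) }
    }
}

fn parse_arg(input: &str) -> ValueResult<Option<String>, Arc<Box<[String]>>> {
    if input.is_empty() {
        // No recoverable value at all: report only the error.
        return ValueResult::only_err(Arc::new(Box::new(["invalid macro call".to_owned()])));
    }
    // A value is produced even if diagnostics were collected along the way.
    ValueResult::ok(Some(input.trim().to_owned()))
}

fn main() {
    let res = parse_arg(" foo ");
    // Callers keep working with `value` and surface `err` separately.
    println!("value = {:?}, err = {}", res.value, res.err.is_some());
}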
@@ -19,7 +19,8 @@
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
 use base_db::CrateId;
-use syntax::{ted, Parse, SyntaxNode};
+use rustc_hash::FxHashMap;
+use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
 use triomphe::Arc;
 
 use crate::{
@@ -38,19 +39,8 @@ pub fn expand_eager_macro_input(
     def: MacroDefId,
     resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
 ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
-    assert!(matches!(def.kind, MacroDefKind::BuiltInEager(..)));
-    let token_tree = macro_call.value.token_tree();
-
-    let Some(token_tree) = token_tree else {
-        return Ok(ExpandResult { value: None, err:
-            Some(ExpandError::other(
-                "invalid token tree"
-            )),
-        });
-    };
-    let (parsed_args, arg_token_map) = mbe::syntax_node_to_token_tree(token_tree.syntax());
-
     let ast_map = db.ast_id_map(macro_call.file_id);
+    // the expansion which the ast id map is built upon has no whitespace, so the offsets are wrong as macro_call is from the token tree that has whitespace!
     let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
     let expand_to = ExpandTo::from_call_site(&macro_call.value);
 
@@ -61,41 +51,69 @@ pub fn expand_eager_macro_input(
     let arg_id = db.intern_macro_call(MacroCallLoc {
         def,
         krate,
-        eager: Some(Box::new(EagerCallInfo {
-            arg: Arc::new((parsed_args, arg_token_map)),
-            arg_id: None,
-            error: None,
-        })),
+        eager: None,
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
     });
-    let arg_as_expr = match db.macro_arg_text(arg_id) {
-        Some(it) => it,
-        None => {
-            return Ok(ExpandResult {
-                value: None,
-                err: Some(ExpandError::other("invalid token tree")),
-            })
+    let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
+        db.parse_macro_expansion(arg_id.as_macro_file());
+    // we need this map here as the expansion of the eager input fake file loses whitespace ...
+    let mut ws_mapping = FxHashMap::default();
+    if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
+        ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
+            Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
+        }));
     }
-    };
-    let ExpandResult { value: expanded_eager_input, err } = eager_macro_recur(
+
+    let ExpandResult { value: expanded_eager_input, err } = {
+        eager_macro_recur(
             db,
             &Hygiene::new(db, macro_call.file_id),
-        InFile::new(arg_id.as_file(), SyntaxNode::new_root(arg_as_expr)),
+            InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
             krate,
             resolver,
-    )?;
-    let Some(expanded_eager_input) = expanded_eager_input else {
-        return Ok(ExpandResult { value: None, err })
+        )?
     };
-    let (mut subtree, token_map) = mbe::syntax_node_to_token_tree(&expanded_eager_input);
+    let err = parse_err.or(err);
+
+    let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
+        return Ok(ExpandResult { value: None, err });
+    };
+
+    let og_tmap = mbe::syntax_node_to_token_map(
+        macro_call.value.token_tree().expect("macro_arg_text succeeded").syntax(),
+    );
+
+    let (mut subtree, expanded_eager_input_token_map) =
+        mbe::syntax_node_to_token_tree(&expanded_eager_input);
+
+    // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
+    // so we need to remap them to the original input of the eager macro.
+    subtree.visit_ids(&|id| {
+        // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
+
+        if let Some(range) =
+            expanded_eager_input_token_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
+        {
+            // remap from expanded eager input to eager input expansion
+            if let Some(og_range) = mapping.get(&range) {
+                // remap from eager input expansion to original eager input
+                if let Some(&og_range) = ws_mapping.get(og_range) {
+                    if let Some(og_token) = og_tmap.token_by_range(og_range) {
+                        return og_token;
+                    }
+                }
+            }
+        }
+        tt::TokenId::UNSPECIFIED
+    });
     subtree.delimiter = crate::tt::Delimiter::unspecified();
 
     let loc = MacroCallLoc {
         def,
         krate,
         eager: Some(Box::new(EagerCallInfo {
-            arg: Arc::new((subtree, token_map)),
-            arg_id: Some(arg_id),
+            arg: Arc::new((subtree, og_tmap)),
+            arg_id,
             error: err.clone(),
         })),
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
@@ -109,19 +127,16 @@ fn lazy_expand(
     def: &MacroDefId,
     macro_call: InFile<ast::MacroCall>,
     krate: CrateId,
-) -> ExpandResult<InFile<Parse<SyntaxNode>>> {
+) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
     let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
 
     let expand_to = ExpandTo::from_call_site(&macro_call.value);
-    let id = def.as_lazy_macro(
-        db,
-        krate,
-        MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
-    );
+    let ast_id = macro_call.with_value(ast_id);
+    let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
 
     let macro_file = id.as_macro_file();
 
-    db.parse_macro_expansion(macro_file).map(|parse| InFile::new(macro_file.into(), parse.0))
+    db.parse_macro_expansion(macro_file)
+        .map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
 }
 
 fn eager_macro_recur(
@@ -130,18 +145,43 @@ fn eager_macro_recur(
     curr: InFile<SyntaxNode>,
     krate: CrateId,
     macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> Result<ExpandResult<Option<SyntaxNode>>, UnresolvedMacro> {
+) -> Result<ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>>, UnresolvedMacro> {
     let original = curr.value.clone_for_update();
+    let mut mapping = FxHashMap::default();
 
-    let children = original.descendants().filter_map(ast::MacroCall::cast);
     let mut replacements = Vec::new();
 
     // Note: We only report a single error inside of eager expansions
     let mut error = None;
+    let mut offset = 0i32;
+    let apply_offset = |it: TextSize, offset: i32| {
+        TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
+    };
+    let mut children = original.preorder_with_tokens();
 
     // Collect replacement
-    for child in children {
-        let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
+    while let Some(child) = children.next() {
+        let WalkEvent::Enter(child) = child else { continue };
+        let call = match child {
+            syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
+                Some(it) => {
+                    children.skip_subtree();
+                    it
+                }
+                None => continue,
+            },
+            syntax::NodeOrToken::Token(t) => {
+                mapping.insert(
+                    TextRange::new(
+                        apply_offset(t.text_range().start(), offset),
+                        apply_offset(t.text_range().end(), offset),
+                    ),
+                    t.text_range(),
+                );
+                continue;
+            }
+        };
+        let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
             Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
             None => {
                 error = Some(ExpandError::other("malformed macro invocation"));
@@ -153,7 +193,7 @@ fn eager_macro_recur(
         let ExpandResult { value, err } = match expand_eager_macro_input(
             db,
             krate,
-            curr.with_value(child.clone()),
+            curr.with_value(call.clone()),
             def,
             macro_resolver,
         ) {
@@ -161,9 +201,22 @@
             Err(err) => return Err(err),
         };
         match value {
-            Some(call) => {
+            Some(call_id) => {
                 let ExpandResult { value, err: err2 } =
-                    db.parse_macro_expansion(call.as_macro_file());
+                    db.parse_macro_expansion(call_id.as_macro_file());
+
+                let call_tt_start =
+                    call.token_tree().unwrap().syntax().text_range().start();
+                let call_start = apply_offset(call.syntax().text_range().start(), offset);
+                if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
+                    mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+                        value
+                            .1
+                            .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
+                            .map(|r| (r + call_start, range + call_tt_start))
+                    }));
+                };
+
                 ExpandResult {
                     value: Some(value.0.syntax_node().clone_for_update()),
                     err: err.or(err2),
@@ -177,36 +230,61 @@
             | MacroDefKind::BuiltInAttr(..)
             | MacroDefKind::BuiltInDerive(..)
             | MacroDefKind::ProcMacro(..) => {
-                let ExpandResult { value, err } =
-                    lazy_expand(db, &def, curr.with_value(child.clone()), krate);
+                let ExpandResult { value: (parse, tm), err } =
+                    lazy_expand(db, &def, curr.with_value(call.clone()), krate);
+                let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
+                    Some(db.decl_macro_expander(def.krate, ast_id))
+                } else {
+                    None
+                };
 
                 // replace macro inside
-                let hygiene = Hygiene::new(db, value.file_id);
+                let hygiene = Hygiene::new(db, parse.file_id);
                 let ExpandResult { value, err: error } = eager_macro_recur(
                     db,
                     &hygiene,
                     // FIXME: We discard parse errors here
-                    value.map(|it| it.syntax_node()),
+                    parse.as_ref().map(|it| it.syntax_node()),
                     krate,
                     macro_resolver,
                 )?;
                 let err = err.or(error);
-                ExpandResult { value, err }
+
+                let call_tt_start = call.token_tree().unwrap().syntax().text_range().start();
+                let call_start = apply_offset(call.syntax().text_range().start(), offset);
+                if let Some((_tt, arg_map, _)) = parse
+                    .file_id
+                    .macro_file()
+                    .and_then(|id| db.macro_arg(id.macro_call_id).value)
+                    .as_deref()
+                {
+                    mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+                        tm.first_range_by_token(
+                            decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
+                            syntax::SyntaxKind::TOMBSTONE,
+                        )
+                        .map(|r| (r + call_start, range + call_tt_start))
+                    }));
+                };
+                // FIXME: Do we need to re-use _m here?
+                ExpandResult { value: value.map(|(n, _m)| n), err }
             }
         };
         if err.is_some() {
             error = err;
         }
         // check if the whole original syntax is replaced
-        if child.syntax() == &original {
-            return Ok(ExpandResult { value, err: error });
+        if call.syntax() == &original {
+            return Ok(ExpandResult { value: value.zip(Some(mapping)), err: error });
         }
 
         if let Some(insert) = value {
-            replacements.push((child, insert));
+            offset += u32::from(insert.text_range().len()) as i32
+                - u32::from(call.syntax().text_range().len()) as i32;
+            replacements.push((call, insert));
        }
    }
 
     replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
-    Ok(ExpandResult { value: Some(original), err: error })
+    Ok(ExpandResult { value: Some((original, mapping)), err: error })
 }
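`eager_macro_recur` now records a `TextRange -> TextRange` mapping while splicing expansions into the original tree, and every replacement shifts later ranges by the signed difference between inserted and removed text. A small self-contained sketch of that offset bookkeeping, using plain `u32` positions in place of the `TextSize` type above:

/// Shift a position from the original text into the rewritten text,
/// mirroring the `apply_offset` closure in the diff above.
fn apply_offset(pos: u32, offset: i32) -> u32 {
    u32::try_from(offset + pos as i32).unwrap_or_default()
}

fn main() {
    // Replacing a 10-byte macro call with a 25-byte expansion shifts
    // everything after the call site by +15 in the rewritten text.
    let mut offset = 0i32;
    let (call_len, insert_len) = (10u32, 25u32);
    offset += insert_len as i32 - call_len as i32;

    // A token that originally started at 25 now starts at 40.
    let original_start = 25u32;
    let rewritten_start = apply_offset(original_start, offset);
    assert_eq!(rewritten_start, 40);
    println!("original {original_start} -> rewritten {rewritten_start}");
}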
@@ -26,7 +26,7 @@ pub(crate) struct SyntaxFixups {
 /// This is the information needed to reverse the fixups.
 #[derive(Debug, Default, PartialEq, Eq)]
 pub struct SyntaxFixupUndoInfo {
-    original: Vec<Subtree>,
+    original: Box<[Subtree]>,
 }
 
 const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
@@ -272,7 +272,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
         replace,
         token_map,
         next_id,
-        undo_info: SyntaxFixupUndoInfo { original },
+        undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
     }
 }
 
@@ -472,13 +472,13 @@ fn foo () {match __ra_fixup {}}
         check(
             r#"
 fn foo() {
-    match x {
+    match it {
 
     }
 }
 "#,
             expect![[r#"
-fn foo () {match x {}}
+fn foo () {match it {}}
 "#]],
         )
     }
@@ -547,11 +547,11 @@ fn foo () {a . __ra_fixup ; bar () ;}
         check(
             r#"
 fn foo() {
-    let x = a
+    let it = a
 }
 "#,
             expect![[r#"
-fn foo () {let x = a ;}
+fn foo () {let it = a ;}
 "#]],
         )
     }
@@ -561,11 +561,11 @@ fn foo () {let x = a ;}
         check(
             r#"
 fn foo() {
-    let x = a.
+    let it = a.
 }
 "#,
             expect![[r#"
-fn foo () {let x = a . __ra_fixup ;}
+fn foo () {let it = a . __ra_fixup ;}
 "#]],
         )
     }
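The `SyntaxFixupUndoInfo` change from `Vec<Subtree>` to `Box<[Subtree]>` is a small footprint win: once the undo info is built it is never pushed to again, so a boxed slice sheds the vector's spare capacity and one word of header. A sketch of the pattern with a placeholder element type:

/// Immutable-after-construction data stored as a boxed slice,
/// mirroring the `SyntaxFixupUndoInfo` change above.
#[derive(Debug, Default, PartialEq, Eq)]
struct UndoInfo {
    original: Box<[String]>,
}

fn build_undo_info() -> UndoInfo {
    let mut original = Vec::new();
    original.push("first".to_owned());
    original.push("second".to_owned());
    // `into_boxed_slice` shrinks to fit and freezes the length.
    UndoInfo { original: original.into_boxed_slice() }
}

fn main() {
    let undo = build_undo_info();
    assert_eq!(undo.original.len(), 2);
    println!("{undo:?}");
}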
@@ -126,7 +126,7 @@ struct HygieneInfo {
     /// The start offset of the `macro_rules!` arguments or attribute input.
     attr_input_or_mac_def_start: Option<InFile<TextSize>>,
 
-    macro_def: Arc<TokenExpander>,
+    macro_def: TokenExpander,
     macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
     macro_arg_shift: mbe::Shift,
     exp_map: Arc<mbe::TokenMap>,
@@ -149,19 +149,15 @@ impl HygieneInfo {
                     token_id = unshifted;
                     (&attr_args.1, self.attr_input_or_mac_def_start?)
                 }
-                None => (
-                    &self.macro_arg.1,
-                    InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
-                ),
+                None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
             },
             _ => match origin {
-                mbe::Origin::Call => (
-                    &self.macro_arg.1,
-                    InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
-                ),
-                mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) {
-                    (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
-                        (def_site_token_map, *tt)
+                mbe::Origin::Call => {
+                    (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
+                }
+                mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
+                    (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
+                        (&expander.def_site_token_map, *tt)
                     }
                     _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
                 },
@@ -198,9 +194,9 @@ fn make_hygiene_info(
         _ => None,
     });
 
-    let macro_def = db.macro_def(loc.def).ok()?;
+    let macro_def = db.macro_expander(loc.def);
     let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
-    let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+    let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
         Arc::new((
             tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
             Default::default(),
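The fallible `macro_def` query (returning `Result<Arc<TokenExpander>, _>`) is replaced by an infallible `macro_expander` that hands out `TokenExpander` by value, with only the heavy declarative variant kept behind an `Arc` via the `decl_macro_expander` query. A sketch of that shape, with placeholder payloads standing in for the real expander types:

use std::sync::Arc;

/// Placeholder for the parsed `macro_rules!`/`macro` definition; the real
/// `DeclarativeMacroExpander` holds the parsed rules and a def-site token map.
struct DeclarativeMacroExpander {
    def_site_token_map: Vec<u32>,
}

/// Cheap-to-clone expander enum: only the declarative variant is heavy,
/// so only it sits behind an `Arc`, mirroring the new `TokenExpander`.
#[allow(dead_code)]
#[derive(Clone)]
enum TokenExpander {
    DeclarativeMacro(Arc<DeclarativeMacroExpander>),
    BuiltIn(&'static str),   // stand-in for the built-in expander handles,
    ProcMacro(&'static str), // which are small copyable values in practice
}

fn expander_for(is_declarative: bool) -> TokenExpander {
    if is_declarative {
        TokenExpander::DeclarativeMacro(Arc::new(DeclarativeMacroExpander {
            def_site_token_map: Vec::new(),
        }))
    } else {
        TokenExpander::BuiltIn("line")
    }
}

fn main() {
    // Cloning is now an `Arc` bump (or a plain copy), with no `Result` to unwrap.
    let exp = expander_for(true);
    let _also = exp.clone();
    if let TokenExpander::DeclarativeMacro(mac) = &exp {
        println!("def-site map entries: {}", mac.def_site_token_map.len());
    }
}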
@@ -37,11 +37,11 @@ use either::Either;
 use syntax::{
     algo::{self, skip_trivia_token},
     ast::{self, AstNode, HasDocComments},
-    Direction, SyntaxNode, SyntaxToken,
+    AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken,
 };
 
 use crate::{
-    ast_id_map::FileAstId,
+    ast_id_map::{AstIdNode, ErasedFileAstId, FileAstId},
     attrs::AttrId,
     builtin_attr_macro::BuiltinAttrExpander,
     builtin_derive_macro::BuiltinDeriveExpander,
@@ -127,7 +127,8 @@ impl_intern_key!(MacroCallId);
 pub struct MacroCallLoc {
     pub def: MacroDefId,
     pub(crate) krate: CrateId,
-    /// Some if `def` is a builtin eager macro.
+    /// Some if this is a macro call for an eager macro. Note that this is `None`
+    /// for the eager input macro file.
     eager: Option<Box<EagerCallInfo>>,
     pub kind: MacroCallKind,
 }
@@ -152,11 +153,10 @@ pub enum MacroDefKind {
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 struct EagerCallInfo {
-    /// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
+    /// The expanded argument of the eager macro.
     arg: Arc<(tt::Subtree, TokenMap)>,
-    /// call id of the eager macro's input file. If this is none, macro call containing this call info
-    /// is an eager macro's input, otherwise it is its output.
-    arg_id: Option<MacroCallId>,
+    /// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
+    arg_id: MacroCallId,
     error: Option<ExpandError>,
 }
 
@@ -221,11 +221,7 @@ impl HirFileId {
                 HirFileIdRepr::FileId(id) => break id,
                 HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
                     let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
-                    let is_include_expansion = loc.def.is_include()
-                        && matches!(
-                            loc.eager.as_deref(),
-                            Some(EagerCallInfo { arg_id: Some(_), .. })
-                        );
+                    let is_include_expansion = loc.def.is_include() && loc.eager.is_some();
                     file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
                         Some(Ok((_, file))) => file.into(),
                         _ => loc.kind.file_id(),
@@ -270,57 +266,13 @@ impl HirFileId {
     /// Return expansion information if it is a macro-expansion file
     pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
         let macro_file = self.macro_file()?;
-        let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-
-        let arg_tt = loc.kind.arg(db)?;
-
-        let macro_def = db.macro_def(loc.def).ok()?;
-        let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
-        let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
-            Arc::new((
-                tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
-                Default::default(),
-                Default::default(),
-            ))
-        });
-
-        let def = loc.def.ast_id().left().and_then(|id| {
-            let def_tt = match id.to_node(db) {
-                ast::Macro::MacroRules(mac) => mac.token_tree()?,
-                ast::Macro::MacroDef(_) if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => {
-                    return None
-                }
-                ast::Macro::MacroDef(mac) => mac.body()?,
-            };
-            Some(InFile::new(id.file_id, def_tt))
-        });
-        let attr_input_or_mac_def = def.or_else(|| match loc.kind {
-            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
-                // FIXME: handle `cfg_attr`
-                let tt = ast_id
-                    .to_node(db)
-                    .doc_comments_and_attrs()
-                    .nth(invoc_attr_index.ast_index())
-                    .and_then(Either::left)?
-                    .token_tree()?;
-                Some(InFile::new(ast_id.file_id, tt))
-            }
-            _ => None,
-        });
-
-        Some(ExpansionInfo {
-            expanded: InFile::new(self, parse.syntax_node()),
-            arg: InFile::new(loc.kind.file_id(), arg_tt),
-            attr_input_or_mac_def,
-            macro_arg_shift: mbe::Shift::new(&macro_arg.0),
-            macro_arg,
-            macro_def,
-            exp_map,
-        })
+        ExpansionInfo::new(db, macro_file)
     }
 
-    /// Indicate it is macro file generated for builtin derive
-    pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<ast::Attr>> {
+    pub fn as_builtin_derive_attr_node(
+        &self,
+        db: &dyn db::ExpandDatabase,
+    ) -> Option<InFile<ast::Attr>> {
         let macro_file = self.macro_file()?;
         let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
         let attr = match loc.def.kind {
@@ -333,8 +285,22 @@ impl HirFileId {
     pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
         match self.macro_file() {
             Some(macro_file) => {
-                let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-                matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _))
+                matches!(
+                    db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
+                    MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+                )
+            }
+            None => false,
+        }
+    }
+
+    pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+        match self.macro_file() {
+            Some(macro_file) => {
+                matches!(
+                    db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
+                    MacroDefKind::BuiltInDerive(..)
+                )
             }
             None => false,
         }
@@ -344,8 +310,7 @@ impl HirFileId {
     pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
         match self.macro_file() {
             Some(macro_file) => {
-                let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-                loc.def.is_include()
+                db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include()
             }
             _ => false,
         }
@@ -355,7 +320,7 @@ impl HirFileId {
         match self.macro_file() {
             Some(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-                matches!(loc.eager.as_deref(), Some(EagerCallInfo { .. }))
+                matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
             }
             _ => false,
         }
@@ -536,9 +501,9 @@ impl MacroCallKind {
         };
 
         let range = match kind {
-            MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
-            MacroCallKind::Derive { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
-            MacroCallKind::Attr { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+            MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+            MacroCallKind::Derive { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+            MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).text_range(),
         };
 
         FileRange { range, file_id }
@@ -588,13 +553,18 @@ impl MacroCallKind {
         FileRange { range, file_id }
     }
 
-    fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<SyntaxNode> {
+    fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
         match self {
-            MacroCallKind::FnLike { ast_id, .. } => {
-                Some(ast_id.to_node(db).token_tree()?.syntax().clone())
+            MacroCallKind::FnLike { ast_id, .. } => ast_id
+                .to_in_file_node(db)
+                .map(|it| Some(it.token_tree()?.syntax().clone()))
+                .transpose(),
+            MacroCallKind::Derive { ast_id, .. } => {
+                Some(ast_id.to_in_file_node(db).syntax().cloned())
+            }
+            MacroCallKind::Attr { ast_id, .. } => {
+                Some(ast_id.to_in_file_node(db).syntax().cloned())
             }
-            MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
-            MacroCallKind::Attr { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
         }
     }
 }
@@ -612,13 +582,13 @@ impl MacroCallId {
 /// ExpansionInfo mainly describes how to map text range between src and expanded macro
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ExpansionInfo {
-    expanded: InFile<SyntaxNode>,
+    expanded: InMacroFile<SyntaxNode>,
     /// The argument TokenTree or item for attributes
     arg: InFile<SyntaxNode>,
     /// The `macro_rules!` or attribute input.
     attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
 
-    macro_def: Arc<TokenExpander>,
+    macro_def: TokenExpander,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
     /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
     /// and as such we need to shift tokens if they are part of an attributes input instead of their item.
@@ -628,7 +598,7 @@ pub struct ExpansionInfo {
 
 impl ExpansionInfo {
     pub fn expanded(&self) -> InFile<SyntaxNode> {
-        self.expanded.clone()
+        self.expanded.clone().into()
     }
 
     pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
@@ -659,7 +629,7 @@ impl ExpansionInfo {
         let token_id_in_attr_input = if let Some(item) = item {
             // check if we are mapping down in an attribute input
             // this is a special case as attributes can have two inputs
-            let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+            let call_id = self.expanded.file_id.macro_call_id;
             let loc = db.lookup_intern_macro_call(call_id);
 
             let token_range = token.value.text_range();
@@ -705,7 +675,7 @@ impl ExpansionInfo {
                 let relative_range =
                     token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
                 let token_id = self.macro_arg.1.token_by_range(relative_range)?;
-                // conditionally shift the id by a declaratives macro definition
+                // conditionally shift the id by a declarative macro definition
                 self.macro_def.map_id_down(token_id)
             }
         };
@@ -715,7 +685,7 @@ impl ExpansionInfo {
             .ranges_by_token(token_id, token.value.kind())
             .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
 
-        Some(tokens.map(move |token| self.expanded.with_value(token)))
+        Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token)))
    }
 
     /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
@@ -724,18 +694,17 @@ impl ExpansionInfo {
         db: &dyn db::ExpandDatabase,
         token: InFile<&SyntaxToken>,
     ) -> Option<(InFile<SyntaxToken>, Origin)> {
+        assert_eq!(token.file_id, self.expanded.file_id.into());
         // Fetch the id through its text range,
         let token_id = self.exp_map.token_by_range(token.value.text_range())?;
         // conditionally unshifting the id to accommodate for macro-rules def site
         let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
 
-        let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+        let call_id = self.expanded.file_id.macro_call_id;
         let loc = db.lookup_intern_macro_call(call_id);
 
         // Special case: map tokens from `include!` expansions to the included file
-        if loc.def.is_include()
-            && matches!(loc.eager.as_deref(), Some(EagerCallInfo { arg_id: Some(_), .. }))
-        {
+        if loc.def.is_include() {
             if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
                 let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
                 let source = db.parse(file_id);
@@ -765,9 +734,9 @@ impl ExpansionInfo {
             }
             _ => match origin {
                 mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
-                mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) {
-                    (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
-                        (def_site_token_map, tt.syntax().cloned())
+                mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) {
+                    (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
+                        (&expander.def_site_token_map, tt.syntax().cloned())
                     }
                     _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
                 },
@@ -779,6 +748,58 @@ impl ExpansionInfo {
             tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
         Some((tt.with_value(token), origin))
     }
+
+    fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> {
+        let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+
+        let arg_tt = loc.kind.arg(db)?;
+
+        let macro_def = db.macro_expander(loc.def);
+        let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
+        let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
+
+        let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
+            Arc::new((
+                tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
+                Default::default(),
+                Default::default(),
+            ))
+        });
+
+        let def = loc.def.ast_id().left().and_then(|id| {
+            let def_tt = match id.to_node(db) {
+                ast::Macro::MacroRules(mac) => mac.token_tree()?,
+                ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
+                    return None
+                }
+                ast::Macro::MacroDef(mac) => mac.body()?,
+            };
+            Some(InFile::new(id.file_id, def_tt))
+        });
+        let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+                // FIXME: handle `cfg_attr`
+                let tt = ast_id
+                    .to_node(db)
+                    .doc_comments_and_attrs()
+                    .nth(invoc_attr_index.ast_index())
+                    .and_then(Either::left)?
+                    .token_tree()?;
+                Some(InFile::new(ast_id.file_id, tt))
+            }
+            _ => None,
+        });
+
+        Some(ExpansionInfo {
+            expanded,
+            arg: arg_tt,
+            attr_input_or_mac_def,
+            macro_arg_shift: mbe::Shift::new(&macro_arg.0),
+            macro_arg,
+            macro_def,
+            exp_map,
+        })
+    }
 }
 
 /// `AstId` points to an AST node in any file.
@@ -786,10 +807,26 @@ impl ExpansionInfo {
 /// It is stable across reparses, and can be used as salsa key/value.
 pub type AstId<N> = InFile<FileAstId<N>>;
 
-impl<N: AstNode> AstId<N> {
+impl<N: AstIdNode> AstId<N> {
     pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
-        let root = db.parse_or_expand(self.file_id);
-        db.ast_id_map(self.file_id).get(self.value).to_node(&root)
+        self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
+    }
+    pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile<N> {
+        InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
+    }
+    pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
+        db.ast_id_map(self.file_id).get(self.value)
+    }
+}
+
+pub type ErasedAstId = InFile<ErasedFileAstId>;
+
+impl ErasedAstId {
+    pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+        self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
+    }
+    pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
+        db.ast_id_map(self.file_id).get_raw(self.value)
    }
 }
@@ -850,7 +887,7 @@ impl<L, R> InFile<Either<L, R>> {
     }
 }
 
-impl<'a> InFile<&'a SyntaxNode> {
+impl InFile<&SyntaxNode> {
     pub fn ancestors_with_macros(
         self,
         db: &dyn db::ExpandDatabase,
@@ -1011,6 +1048,18 @@ impl InFile<SyntaxToken> {
     }
 }
 
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InMacroFile<T> {
+    pub file_id: MacroFile,
+    pub value: T,
+}
+
+impl<T> From<InMacroFile<T>> for InFile<T> {
+    fn from(macro_file: InMacroFile<T>) -> Self {
+        InFile { file_id: macro_file.file_id.into(), value: macro_file.value }
+    }
+}
+
 fn ascend_node_border_tokens(
     db: &dyn db::ExpandDatabase,
     InFile { file_id, value: node }: InFile<&SyntaxNode>,
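The new `InMacroFile<T>` wrapper gives `ExpansionInfo::expanded` a statically known macro-file id, so fallible `.macro_file()?` lookups like the one removed in `map_token_up` become plain field accesses, and widening back to `InFile` is a `From` conversion. A reduced sketch of the pattern with stub id types (the structs below are simplified stand-ins, not the real definitions):

// Stub id types standing in for `MacroFile`/`HirFileId` from the diff above.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct MacroFile { macro_call_id: u32 }

#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum HirFileId { File(u32), Macro(MacroFile) }

impl From<MacroFile> for HirFileId {
    fn from(m: MacroFile) -> Self { HirFileId::Macro(m) }
}

/// A value paired with the (possibly macro) file it came from.
#[derive(Debug, Clone, Copy)]
struct InFile<T> { file_id: HirFileId, value: T }

/// Like `InFile`, but the file is statically known to be a macro file.
#[derive(Debug, Clone, Copy)]
struct InMacroFile<T> { file_id: MacroFile, value: T }

impl<T> From<InMacroFile<T>> for InFile<T> {
    fn from(macro_file: InMacroFile<T>) -> Self {
        InFile { file_id: macro_file.file_id.into(), value: macro_file.value }
    }
}

fn main() {
    let expanded = InMacroFile { file_id: MacroFile { macro_call_id: 7 }, value: "node" };
    // No fallible `.macro_file()?` needed: the id is right there.
    let call_id = expanded.file_id.macro_call_id;
    let widened: InFile<&str> = expanded.into();
    println!("call {call_id}: {:?} -> {}", widened.file_id, widened.value);
}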
@@ -126,7 +126,7 @@ struct Display<'a> {
     path: &'a ModPath,
 }
 
-impl<'a> fmt::Display for Display<'a> {
+impl fmt::Display for Display<'_> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         display_fmt_path(self.db, self.path, f, true)
     }
@@ -137,7 +137,7 @@ struct UnescapedDisplay<'a> {
     path: &'a UnescapedModPath<'a>,
 }
 
-impl<'a> fmt::Display for UnescapedDisplay<'a> {
+impl fmt::Display for UnescapedDisplay<'_> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         display_fmt_path(self.db, self.path.0, f, false)
     }
@ -24,7 +24,7 @@ enum Repr {
|
||||||
TupleField(usize),
|
TupleField(usize),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> UnescapedName<'a> {
|
impl UnescapedName<'_> {
|
||||||
/// Returns the textual representation of this name as a [`SmolStr`]. Prefer using this over
|
/// Returns the textual representation of this name as a [`SmolStr`]. Prefer using this over
|
||||||
/// [`ToString::to_string`] if possible as this conversion is cheaper in the general case.
|
/// [`ToString::to_string`] if possible as this conversion is cheaper in the general case.
|
||||||
pub fn to_smol_str(&self) -> SmolStr {
|
pub fn to_smol_str(&self) -> SmolStr {
|
||||||
|
@ -40,7 +40,7 @@ impl<'a> UnescapedName<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn display(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
|
pub fn display(&self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + '_ {
|
||||||
_ = db;
|
_ = db;
|
||||||
UnescapedDisplay { name: self }
|
UnescapedDisplay { name: self }
|
||||||
}
|
}
|
||||||
|
@ -96,6 +96,15 @@ impl Name {
|
||||||
Name::new_inline("[missing name]")
|
Name::new_inline("[missing name]")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns true if this is a fake name for things missing in the source code. See
|
||||||
|
/// [`missing()`][Self::missing] for details.
|
||||||
|
///
|
||||||
|
/// Use this method instead of comparing with `Self::missing()` as missing names
|
||||||
|
/// (ideally should) have a `gensym` semantics.
|
||||||
|
pub fn is_missing(&self) -> bool {
|
||||||
|
self == &Name::missing()
|
||||||
|
}
|
||||||
|
|
||||||
/// Generates a new name which is only equal to itself, by incrementing a counter. Due
|
/// Generates a new name which is only equal to itself, by incrementing a counter. Due
|
||||||
/// its implementation, it should not be used in things that salsa considers, like
|
/// its implementation, it should not be used in things that salsa considers, like
|
||||||
/// type names or field names, and it should be only used in names of local variables
|
/// type names or field names, and it should be only used in names of local variables
|
||||||
|
@ -162,7 +171,7 @@ struct Display<'a> {
|
||||||
name: &'a Name,
|
name: &'a Name,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> fmt::Display for Display<'a> {
|
impl fmt::Display for Display<'_> {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
match &self.name.0 {
|
match &self.name.0 {
|
||||||
Repr::Text(text) => fmt::Display::fmt(&text, f),
|
Repr::Text(text) => fmt::Display::fmt(&text, f),
|
||||||
|
@ -175,7 +184,7 @@ struct UnescapedDisplay<'a> {
|
||||||
name: &'a UnescapedName<'a>,
|
name: &'a UnescapedName<'a>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> fmt::Display for UnescapedDisplay<'a> {
|
impl fmt::Display for UnescapedDisplay<'_> {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
match &self.name.0 .0 {
|
match &self.name.0 .0 {
|
||||||
Repr::Text(text) => {
|
Repr::Text(text) => {
|
||||||
|
@ -282,8 +291,10 @@ pub mod known {
|
||||||
alloc,
|
alloc,
|
||||||
iter,
|
iter,
|
||||||
ops,
|
ops,
|
||||||
|
fmt,
|
||||||
future,
|
future,
|
||||||
result,
|
result,
|
||||||
|
string,
|
||||||
boxed,
|
boxed,
|
||||||
option,
|
option,
|
||||||
prelude,
|
prelude,
|
||||||
|
@ -311,6 +322,7 @@ pub mod known {
|
||||||
RangeToInclusive,
|
RangeToInclusive,
|
||||||
RangeTo,
|
RangeTo,
|
||||||
Range,
|
Range,
|
||||||
|
String,
|
||||||
Neg,
|
Neg,
|
||||||
Not,
|
Not,
|
||||||
None,
|
None,
|
||||||
|
@ -321,6 +333,7 @@ pub mod known {
|
||||||
iter_mut,
|
iter_mut,
|
||||||
len,
|
len,
|
||||||
is_empty,
|
is_empty,
|
||||||
|
as_str,
|
||||||
new,
|
new,
|
||||||
// Builtin macros
|
// Builtin macros
|
||||||
asm,
|
asm,
|
||||||
|
@ -334,6 +347,7 @@ pub mod known {
|
||||||
core_panic,
|
core_panic,
|
||||||
env,
|
env,
|
||||||
file,
|
file,
|
||||||
|
format,
|
||||||
format_args_nl,
|
format_args_nl,
|
||||||
format_args,
|
format_args,
|
||||||
global_asm,
|
global_asm,
|
||||||
|
@ -365,6 +379,7 @@ pub mod known {
|
||||||
cfg_eval,
|
cfg_eval,
|
||||||
crate_type,
|
crate_type,
|
||||||
derive,
|
derive,
|
||||||
|
derive_const,
|
||||||
global_allocator,
|
global_allocator,
|
||||||
no_core,
|
no_core,
|
||||||
no_std,
|
no_std,
|
||||||
|
|
|
@ -19,14 +19,15 @@ bitflags = "2.1.0"
|
||||||
smallvec.workspace = true
|
smallvec.workspace = true
|
||||||
ena = "0.14.0"
|
ena = "0.14.0"
|
||||||
either = "1.7.0"
|
either = "1.7.0"
|
||||||
|
oorandom = "11.1.3"
|
||||||
tracing = "0.1.35"
|
tracing = "0.1.35"
|
||||||
rustc-hash = "1.1.0"
|
rustc-hash = "1.1.0"
|
||||||
scoped-tls = "1.0.0"
|
scoped-tls = "1.0.0"
|
||||||
chalk-solve = { version = "0.91.0", default-features = false }
|
chalk-solve = { version = "0.92.0", default-features = false }
|
||||||
chalk-ir = "0.91.0"
|
chalk-ir = "0.92.0"
|
||||||
chalk-recursive = { version = "0.91.0", default-features = false }
|
chalk-recursive = { version = "0.92.0", default-features = false }
|
||||||
chalk-derive = "0.91.0"
|
chalk-derive = "0.92.0"
|
||||||
la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
|
la-arena.workspace = true
|
||||||
once_cell = "1.17.0"
|
once_cell = "1.17.0"
|
||||||
triomphe.workspace = true
|
triomphe.workspace = true
|
||||||
nohash-hasher.workspace = true
|
nohash-hasher.workspace = true
|
||||||
|
@ -47,7 +48,6 @@ limit.workspace = true
|
||||||
expect-test = "1.4.0"
|
expect-test = "1.4.0"
|
||||||
tracing = "0.1.35"
|
tracing = "0.1.35"
|
||||||
tracing-subscriber = { version = "0.3.16", default-features = false, features = [
|
tracing-subscriber = { version = "0.3.16", default-features = false, features = [
|
||||||
"env-filter",
|
|
||||||
"registry",
|
"registry",
|
||||||
] }
|
] }
|
||||||
tracing-tree = "0.2.1"
|
tracing-tree = "0.2.1"
|
||||||
|
|
|
@ -36,7 +36,7 @@ pub fn autoderef(
|
||||||
) -> impl Iterator<Item = Ty> {
|
) -> impl Iterator<Item = Ty> {
|
||||||
let mut table = InferenceTable::new(db, env);
|
let mut table = InferenceTable::new(db, env);
|
||||||
let ty = table.instantiate_canonical(ty);
|
let ty = table.instantiate_canonical(ty);
|
||||||
let mut autoderef = Autoderef::new(&mut table, ty);
|
let mut autoderef = Autoderef::new(&mut table, ty, false);
|
||||||
let mut v = Vec::new();
|
let mut v = Vec::new();
|
||||||
while let Some((ty, _steps)) = autoderef.next() {
|
while let Some((ty, _steps)) = autoderef.next() {
|
||||||
// `ty` may contain unresolved inference variables. Since there's no chance they would be
|
// `ty` may contain unresolved inference variables. Since there's no chance they would be
|
||||||
|
@ -63,12 +63,13 @@ pub(crate) struct Autoderef<'a, 'db> {
|
||||||
ty: Ty,
|
ty: Ty,
|
||||||
at_start: bool,
|
at_start: bool,
|
||||||
steps: Vec<(AutoderefKind, Ty)>,
|
steps: Vec<(AutoderefKind, Ty)>,
|
||||||
|
explicit: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'db> Autoderef<'a, 'db> {
|
impl<'a, 'db> Autoderef<'a, 'db> {
|
||||||
pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty) -> Self {
|
pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty, explicit: bool) -> Self {
|
||||||
let ty = table.resolve_ty_shallow(&ty);
|
let ty = table.resolve_ty_shallow(&ty);
|
||||||
Autoderef { table, ty, at_start: true, steps: Vec::new() }
|
Autoderef { table, ty, at_start: true, steps: Vec::new(), explicit }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn step_count(&self) -> usize {
|
pub(crate) fn step_count(&self) -> usize {
|
||||||
|
@ -97,7 +98,7 @@ impl Iterator for Autoderef<'_, '_> {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
let (kind, new_ty) = autoderef_step(self.table, self.ty.clone())?;
|
let (kind, new_ty) = autoderef_step(self.table, self.ty.clone(), self.explicit)?;
|
||||||
|
|
||||||
self.steps.push((kind, self.ty.clone()));
|
self.steps.push((kind, self.ty.clone()));
|
||||||
self.ty = new_ty;
|
self.ty = new_ty;
|
||||||
|
@ -109,8 +110,9 @@ impl Iterator for Autoderef<'_, '_> {
|
||||||
pub(crate) fn autoderef_step(
|
pub(crate) fn autoderef_step(
|
||||||
table: &mut InferenceTable<'_>,
|
table: &mut InferenceTable<'_>,
|
||||||
ty: Ty,
|
ty: Ty,
|
||||||
|
explicit: bool,
|
||||||
) -> Option<(AutoderefKind, Ty)> {
|
) -> Option<(AutoderefKind, Ty)> {
|
||||||
if let Some(derefed) = builtin_deref(table, &ty, false) {
|
if let Some(derefed) = builtin_deref(table, &ty, explicit) {
|
||||||
Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed)))
|
Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed)))
|
||||||
} else {
|
} else {
|
||||||
Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?))
|
Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?))
|
||||||
|
@ -124,7 +126,6 @@ pub(crate) fn builtin_deref<'ty>(
|
||||||
) -> Option<&'ty Ty> {
|
) -> Option<&'ty Ty> {
|
||||||
match ty.kind(Interner) {
|
match ty.kind(Interner) {
|
||||||
TyKind::Ref(.., ty) => Some(ty),
|
TyKind::Ref(.., ty) => Some(ty),
|
||||||
// FIXME: Maybe accept this but diagnose if its not explicit?
|
|
||||||
TyKind::Raw(.., ty) if explicit => Some(ty),
|
TyKind::Raw(.., ty) if explicit => Some(ty),
|
||||||
&TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => {
|
&TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => {
|
||||||
if crate::lang_items::is_box(table.db, adt) {
|
if crate::lang_items::is_box(table.db, adt) {
|
||||||
|
|
|
@ -5,13 +5,13 @@ use std::{iter, sync::Arc};
|
||||||
|
|
||||||
use tracing::debug;
|
use tracing::debug;
|
||||||
|
|
||||||
use chalk_ir::{cast::Cast, fold::shift::Shift, CanonicalVarKinds};
|
use chalk_ir::{cast::Caster, fold::shift::Shift, CanonicalVarKinds};
|
||||||
use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
|
use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
|
||||||
|
|
||||||
use base_db::CrateId;
|
use base_db::CrateId;
|
||||||
use hir_def::{
|
use hir_def::{
|
||||||
hir::Movability,
|
hir::Movability,
|
||||||
lang_item::{lang_attr, LangItem, LangItemTarget},
|
lang_item::{LangItem, LangItemTarget},
|
||||||
AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId,
|
AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId,
|
||||||
};
|
};
|
||||||
use hir_expand::name::name;
|
use hir_expand::name::name;
|
||||||
|
@ -46,7 +46,7 @@ pub(crate) type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Inte
|
||||||
pub(crate) type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
|
pub(crate) type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
|
||||||
pub(crate) type Variances = chalk_ir::Variances<Interner>;
|
pub(crate) type Variances = chalk_ir::Variances<Interner>;
|
||||||
|
|
||||||
impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
|
impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
|
||||||
fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
|
fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
|
||||||
self.db.associated_ty_data(id)
|
self.db.associated_ty_data(id)
|
||||||
}
|
}
|
||||||
|
@ -60,9 +60,37 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
|
||||||
// FIXME: keep track of these
|
// FIXME: keep track of these
|
||||||
Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None })
|
Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None })
|
||||||
}
|
}
|
||||||
fn discriminant_type(&self, _ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
|
fn discriminant_type(&self, ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
|
||||||
// FIXME: keep track of this
|
if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) {
|
||||||
chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(Interner)
|
if let hir_def::AdtId::EnumId(e) = id.0 {
|
||||||
|
let enum_data = self.db.enum_data(e);
|
||||||
|
let ty = enum_data.repr.unwrap_or_default().discr_type();
|
||||||
|
return chalk_ir::TyKind::Scalar(match ty {
|
||||||
|
hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed {
|
||||||
|
true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize),
|
||||||
|
false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize),
|
||||||
|
},
|
||||||
|
hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed {
|
||||||
|
true => chalk_ir::Scalar::Int(match size {
|
||||||
|
hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8,
|
||||||
|
hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16,
|
||||||
|
hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32,
|
||||||
|
hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64,
|
||||||
|
hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128,
|
||||||
|
}),
|
||||||
|
false => chalk_ir::Scalar::Uint(match size {
|
||||||
|
hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8,
|
||||||
|
hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16,
|
||||||
|
hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32,
|
||||||
|
hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64,
|
||||||
|
hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.intern(Interner);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8)).intern(Interner)
|
||||||
}
|
}
|
||||||
fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
|
fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
|
||||||
self.db.impl_datum(self.krate, impl_id)
|
self.db.impl_datum(self.krate, impl_id)
|
||||||
|
@ -565,7 +593,7 @@ pub(crate) fn trait_datum_query(
|
||||||
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
|
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
|
||||||
let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
|
let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
|
||||||
let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
|
let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
|
||||||
let well_known = lang_attr(db.upcast(), trait_).and_then(well_known_trait_from_lang_item);
|
let well_known = db.lang_attr(trait_.into()).and_then(well_known_trait_from_lang_item);
|
||||||
let trait_datum = TraitDatum {
|
let trait_datum = TraitDatum {
|
||||||
id: trait_id,
|
id: trait_id,
|
||||||
binders: make_binders(db, &generic_params, trait_datum_bound),
|
binders: make_binders(db, &generic_params, trait_datum_bound),
|
||||||
|
@ -593,6 +621,7 @@ fn well_known_trait_from_lang_item(item: LangItem) -> Option<WellKnownTrait> {
|
||||||
LangItem::Unsize => WellKnownTrait::Unsize,
|
LangItem::Unsize => WellKnownTrait::Unsize,
|
||||||
LangItem::Tuple => WellKnownTrait::Tuple,
|
LangItem::Tuple => WellKnownTrait::Tuple,
|
||||||
LangItem::PointeeTrait => WellKnownTrait::Pointee,
|
LangItem::PointeeTrait => WellKnownTrait::Pointee,
|
||||||
|
LangItem::FnPtrTrait => WellKnownTrait::FnPtr,
|
||||||
_ => return None,
|
_ => return None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -614,6 +643,7 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem {
|
||||||
WellKnownTrait::Unpin => LangItem::Unpin,
|
WellKnownTrait::Unpin => LangItem::Unpin,
|
||||||
WellKnownTrait::Unsize => LangItem::Unsize,
|
WellKnownTrait::Unsize => LangItem::Unsize,
|
||||||
WellKnownTrait::Pointee => LangItem::PointeeTrait,
|
WellKnownTrait::Pointee => LangItem::PointeeTrait,
|
||||||
|
WellKnownTrait::FnPtr => LangItem::FnPtrTrait,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -844,28 +874,34 @@ pub(super) fn generic_predicate_to_inline_bound(
|
||||||
}
|
}
|
||||||
let args_no_self = trait_ref.substitution.as_slice(Interner)[1..]
|
let args_no_self = trait_ref.substitution.as_slice(Interner)[1..]
|
||||||
.iter()
|
.iter()
|
||||||
.map(|ty| ty.clone().cast(Interner))
|
.cloned()
|
||||||
|
.casted(Interner)
|
||||||
.collect();
|
.collect();
|
||||||
let trait_bound = rust_ir::TraitBound { trait_id: trait_ref.trait_id, args_no_self };
|
let trait_bound = rust_ir::TraitBound { trait_id: trait_ref.trait_id, args_no_self };
|
||||||
Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound)))
|
Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound)))
|
||||||
}
|
}
|
||||||
WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
|
WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
|
||||||
let trait_ = projection_ty.trait_(db);
|
let generics =
|
||||||
if projection_ty.self_type_parameter(db) != self_ty_shifted_in {
|
generics(db.upcast(), from_assoc_type_id(projection_ty.associated_ty_id).into());
|
||||||
|
let (assoc_args, trait_args) =
|
||||||
|
projection_ty.substitution.as_slice(Interner).split_at(generics.len_self());
|
||||||
|
let (self_ty, args_no_self) =
|
||||||
|
trait_args.split_first().expect("projection without trait self type");
|
||||||
|
if self_ty.assert_ty_ref(Interner) != &self_ty_shifted_in {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
let args_no_self = projection_ty.substitution.as_slice(Interner)[1..]
|
|
||||||
.iter()
|
let args_no_self = args_no_self.iter().cloned().casted(Interner).collect();
|
||||||
.map(|ty| ty.clone().cast(Interner))
|
let parameters = assoc_args.to_vec();
|
||||||
.collect();
|
|
||||||
let alias_eq_bound = rust_ir::AliasEqBound {
|
let alias_eq_bound = rust_ir::AliasEqBound {
|
||||||
value: ty.clone(),
|
value: ty.clone(),
|
||||||
trait_bound: rust_ir::TraitBound {
|
trait_bound: rust_ir::TraitBound {
|
||||||
trait_id: to_chalk_trait_id(trait_),
|
trait_id: to_chalk_trait_id(projection_ty.trait_(db)),
|
||||||
args_no_self,
|
args_no_self,
|
||||||
},
|
},
|
||||||
associated_ty_id: projection_ty.associated_ty_id,
|
associated_ty_id: projection_ty.associated_ty_id,
|
||||||
parameters: Vec::new(), // FIXME we don't support generic associated types yet
|
parameters,
|
||||||
};
|
};
|
||||||
Some(chalk_ir::Binders::new(
|
Some(chalk_ir::Binders::new(
|
||||||
binders,
|
binders,
|
||||||
|
|
|
@ -343,7 +343,8 @@ impl TyExt for Ty {
|
||||||
|
|
||||||
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
|
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
|
||||||
let crate_id = owner.module(db.upcast()).krate();
|
let crate_id = owner.module(db.upcast()).krate();
|
||||||
let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|x| x.as_trait()) else {
|
let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|it| it.as_trait())
|
||||||
|
else {
|
||||||
return false;
|
return false;
|
||||||
};
|
};
|
||||||
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build();
|
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build();
|
||||||
|
|
|
@ -88,7 +88,7 @@ pub(crate) fn path_to_const(
|
||||||
ConstValue::Placeholder(to_placeholder_idx(db, p.into()))
|
ConstValue::Placeholder(to_placeholder_idx(db, p.into()))
|
||||||
}
|
}
|
||||||
ParamLoweringMode::Variable => match args.param_idx(p.into()) {
|
ParamLoweringMode::Variable => match args.param_idx(p.into()) {
|
||||||
Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)),
|
Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
|
||||||
None => {
|
None => {
|
||||||
never!(
|
never!(
|
||||||
"Generic list doesn't contain this param: {:?}, {:?}, {:?}",
|
"Generic list doesn't contain this param: {:?}, {:?}, {:?}",
|
||||||
|
@ -139,11 +139,11 @@ pub fn intern_const_ref(
|
||||||
let bytes = match value {
|
let bytes = match value {
|
||||||
LiteralConstRef::Int(i) => {
|
LiteralConstRef::Int(i) => {
|
||||||
// FIXME: We should handle failure of layout better.
|
// FIXME: We should handle failure of layout better.
|
||||||
let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
|
let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
|
||||||
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
|
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
|
||||||
}
|
}
|
||||||
LiteralConstRef::UInt(i) => {
|
LiteralConstRef::UInt(i) => {
|
||||||
let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
|
let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
|
||||||
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
|
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
|
||||||
}
|
}
|
||||||
LiteralConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
|
LiteralConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
|
||||||
|
@ -171,7 +171,7 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
|
||||||
chalk_ir::ConstValue::InferenceVar(_) => None,
|
chalk_ir::ConstValue::InferenceVar(_) => None,
|
||||||
chalk_ir::ConstValue::Placeholder(_) => None,
|
chalk_ir::ConstValue::Placeholder(_) => None,
|
||||||
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
|
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
|
||||||
ConstScalar::Bytes(x, _) => Some(u128::from_le_bytes(pad16(&x, false))),
|
ConstScalar::Bytes(it, _) => Some(u128::from_le_bytes(pad16(&it, false))),
|
||||||
ConstScalar::UnevaluatedConst(c, subst) => {
|
ConstScalar::UnevaluatedConst(c, subst) => {
|
||||||
let ec = db.const_eval(*c, subst.clone()).ok()?;
|
let ec = db.const_eval(*c, subst.clone()).ok()?;
|
||||||
try_const_usize(db, &ec)
|
try_const_usize(db, &ec)
|
||||||
|
@ -228,7 +228,7 @@ pub(crate) fn const_eval_query(
|
||||||
}
|
}
|
||||||
GeneralConstId::InTypeConstId(c) => db.mir_body(c.into())?,
|
GeneralConstId::InTypeConstId(c) => db.mir_body(c.into())?,
|
||||||
};
|
};
|
||||||
let c = interpret_mir(db, &body, false).0?;
|
let c = interpret_mir(db, body, false).0?;
|
||||||
Ok(c)
|
Ok(c)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -241,7 +241,7 @@ pub(crate) fn const_eval_static_query(
|
||||||
Substitution::empty(Interner),
|
Substitution::empty(Interner),
|
||||||
db.trait_environment_for_body(def.into()),
|
db.trait_environment_for_body(def.into()),
|
||||||
)?;
|
)?;
|
||||||
let c = interpret_mir(db, &body, false).0?;
|
let c = interpret_mir(db, body, false).0?;
|
||||||
Ok(c)
|
Ok(c)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -268,7 +268,7 @@ pub(crate) fn const_eval_discriminant_variant(
|
||||||
Substitution::empty(Interner),
|
Substitution::empty(Interner),
|
||||||
db.trait_environment_for_body(def),
|
db.trait_environment_for_body(def),
|
||||||
)?;
|
)?;
|
||||||
let c = interpret_mir(db, &mir_body, false).0?;
|
let c = interpret_mir(db, mir_body, false).0?;
|
||||||
let c = try_const_usize(db, &c).unwrap() as i128;
|
let c = try_const_usize(db, &c).unwrap() as i128;
|
||||||
Ok(c)
|
Ok(c)
|
||||||
}
|
}
|
||||||
|
@ -293,7 +293,7 @@ pub(crate) fn eval_to_const(
|
||||||
}
|
}
|
||||||
let infer = ctx.clone().resolve_all();
|
let infer = ctx.clone().resolve_all();
|
||||||
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
|
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
|
||||||
if let Ok(result) = interpret_mir(db, &mir_body, true).0 {
|
if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true).0 {
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,10 +1,11 @@
|
||||||
use base_db::{fixture::WithFixture, FileId};
|
use base_db::{fixture::WithFixture, FileId};
|
||||||
use chalk_ir::Substitution;
|
use chalk_ir::Substitution;
|
||||||
use hir_def::db::DefDatabase;
|
use hir_def::db::DefDatabase;
|
||||||
|
use test_utils::skip_slow_tests;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
consteval::try_const_usize, db::HirDatabase, mir::pad16, test_db::TestDB, Const, ConstScalar,
|
consteval::try_const_usize, db::HirDatabase, mir::pad16, test_db::TestDB, Const, ConstScalar,
|
||||||
Interner,
|
Interner, MemoryMap,
|
||||||
};
|
};
|
||||||
|
|
||||||
use super::{
|
use super::{
|
||||||
|
@ -16,7 +17,7 @@ mod intrinsics;
|
||||||
|
|
||||||
fn simplify(e: ConstEvalError) -> ConstEvalError {
|
fn simplify(e: ConstEvalError) -> ConstEvalError {
|
||||||
match e {
|
match e {
|
||||||
ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e, _, _)) => {
|
ConstEvalError::MirEvalError(MirEvalError::InFunction(e, _)) => {
|
||||||
simplify(ConstEvalError::MirEvalError(*e))
|
simplify(ConstEvalError::MirEvalError(*e))
|
||||||
}
|
}
|
||||||
_ => e,
|
_ => e,
|
||||||
|
@ -36,7 +37,37 @@ fn check_fail(ra_fixture: &str, error: impl FnOnce(ConstEvalError) -> bool) {
|
||||||
|
|
||||||
#[track_caller]
|
#[track_caller]
|
||||||
fn check_number(ra_fixture: &str, answer: i128) {
|
fn check_number(ra_fixture: &str, answer: i128) {
|
||||||
let (db, file_id) = TestDB::with_single_file(ra_fixture);
|
check_answer(ra_fixture, |b, _| {
|
||||||
|
assert_eq!(
|
||||||
|
b,
|
||||||
|
&answer.to_le_bytes()[0..b.len()],
|
||||||
|
"Bytes differ. In decimal form: actual = {}, expected = {answer}",
|
||||||
|
i128::from_le_bytes(pad16(b, true))
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[track_caller]
|
||||||
|
fn check_str(ra_fixture: &str, answer: &str) {
|
||||||
|
check_answer(ra_fixture, |b, mm| {
|
||||||
|
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
|
||||||
|
let size = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
|
||||||
|
let Some(bytes) = mm.get(addr, size) else {
|
||||||
|
panic!("string data missed in the memory map");
|
||||||
|
};
|
||||||
|
assert_eq!(
|
||||||
|
bytes,
|
||||||
|
answer.as_bytes(),
|
||||||
|
"Bytes differ. In string form: actual = {}, expected = {answer}",
|
||||||
|
String::from_utf8_lossy(bytes)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[track_caller]
|
||||||
|
fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
|
||||||
|
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
|
||||||
|
let file_id = *file_ids.last().unwrap();
|
||||||
let r = match eval_goal(&db, file_id) {
|
let r = match eval_goal(&db, file_id) {
|
||||||
Ok(t) => t,
|
Ok(t) => t,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
|
@ -46,13 +77,8 @@ fn check_number(ra_fixture: &str, answer: i128) {
|
||||||
};
|
};
|
||||||
match &r.data(Interner).value {
|
match &r.data(Interner).value {
|
||||||
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
|
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
|
||||||
ConstScalar::Bytes(b, _) => {
|
ConstScalar::Bytes(b, mm) => {
|
||||||
assert_eq!(
|
check(b, mm);
|
||||||
b,
|
|
||||||
&answer.to_le_bytes()[0..b.len()],
|
|
||||||
"Bytes differ. In decimal form: actual = {}, expected = {answer}",
|
|
||||||
i128::from_le_bytes(pad16(b, true))
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
x => panic!("Expected number but found {:?}", x),
|
x => panic!("Expected number but found {:?}", x),
|
||||||
},
|
},
|
||||||
|
@ -87,7 +113,7 @@ fn eval_goal(db: &TestDB, file_id: FileId) -> Result<Const, ConstEvalError> {
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
})
|
})
|
||||||
.unwrap();
|
.expect("No const named GOAL found in the test");
|
||||||
db.const_eval(const_id.into(), Substitution::empty(Interner))
|
db.const_eval(const_id.into(), Substitution::empty(Interner))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -108,6 +134,7 @@ fn bit_op() {
|
||||||
check_fail(r#"const GOAL: i8 = 1 << 8"#, |e| {
|
check_fail(r#"const GOAL: i8 = 1 << 8"#, |e| {
|
||||||
e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_string()))
|
e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_string()))
|
||||||
});
|
});
|
||||||
|
check_number(r#"const GOAL: i32 = 100000000i32 << 11"#, (100000000i32 << 11) as i128);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -166,14 +193,21 @@ fn casts() {
|
||||||
check_number(
|
check_number(
|
||||||
r#"
|
r#"
|
||||||
//- minicore: coerce_unsized, index, slice
|
//- minicore: coerce_unsized, index, slice
|
||||||
|
struct X {
|
||||||
|
unsize_field: [u8],
|
||||||
|
}
|
||||||
|
|
||||||
const GOAL: usize = {
|
const GOAL: usize = {
|
||||||
let a = [10, 20, 3, 15];
|
let a = [10, 20, 3, 15];
|
||||||
let x: &[i32] = &a;
|
let x: &[i32] = &a;
|
||||||
let y: *const [i32] = x;
|
let x: *const [i32] = x;
|
||||||
let z = y as *const [u8]; // slice fat pointer cast don't touch metadata
|
let x = x as *const [u8]; // slice fat pointer cast don't touch metadata
|
||||||
let q = z as *const str;
|
let x = x as *const str;
|
||||||
let p = q as *const [u8];
|
let x = x as *const X;
|
||||||
let w = unsafe { &*z };
|
let x = x as *const [i16];
|
||||||
|
let x = x as *const X;
|
||||||
|
let x = x as *const [u8];
|
||||||
|
let w = unsafe { &*x };
|
||||||
w.len()
|
w.len()
|
||||||
};
|
};
|
||||||
"#,
|
"#,
|
||||||
|
@ -198,6 +232,30 @@ fn raw_pointer_equality() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn alignment() {
|
||||||
|
check_answer(
|
||||||
|
r#"
|
||||||
|
//- minicore: transmute
|
||||||
|
use core::mem::transmute;
|
||||||
|
const GOAL: usize = {
|
||||||
|
let x: i64 = 2;
|
||||||
|
transmute(&x)
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
|b, _| assert_eq!(b[0] % 8, 0),
|
||||||
|
);
|
||||||
|
check_answer(
|
||||||
|
r#"
|
||||||
|
//- minicore: transmute
|
||||||
|
use core::mem::transmute;
|
||||||
|
static X: i64 = 12;
|
||||||
|
const GOAL: usize = transmute(&X);
|
||||||
|
"#,
|
||||||
|
|b, _| assert_eq!(b[0] % 8, 0),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn locals() {
|
fn locals() {
|
||||||
check_number(
|
check_number(
|
||||||
|
@ -1550,6 +1608,30 @@ fn closures() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn manual_fn_trait_impl() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: fn, copy
|
||||||
|
struct S(i32);
|
||||||
|
|
||||||
|
impl FnOnce<(i32, i32)> for S {
|
||||||
|
type Output = i32;
|
||||||
|
|
||||||
|
extern "rust-call" fn call_once(self, arg: (i32, i32)) -> i32 {
|
||||||
|
arg.0 + arg.1 + self.0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: i32 = {
|
||||||
|
let s = S(1);
|
||||||
|
s(2, 3)
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
6,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn closure_and_impl_fn() {
|
fn closure_and_impl_fn() {
|
||||||
check_number(
|
check_number(
|
||||||
|
@ -1661,6 +1743,18 @@ fn function_pointer() {
|
||||||
"#,
|
"#,
|
||||||
5,
|
5,
|
||||||
);
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
fn add2(x: u8) -> u8 {
|
||||||
|
x + 2
|
||||||
|
}
|
||||||
|
const GOAL: u8 = {
|
||||||
|
let plus2 = add2 as fn(u8) -> u8;
|
||||||
|
plus2(3)
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
5,
|
||||||
|
);
|
||||||
check_number(
|
check_number(
|
||||||
r#"
|
r#"
|
||||||
//- minicore: coerce_unsized, index, slice
|
//- minicore: coerce_unsized, index, slice
|
||||||
|
@ -1849,6 +1943,38 @@ fn dyn_trait() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn coerce_unsized() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized, deref_mut, slice, index, transmute, non_null
|
||||||
|
use core::ops::{Deref, DerefMut, CoerceUnsized};
|
||||||
|
use core::{marker::Unsize, mem::transmute, ptr::NonNull};
|
||||||
|
|
||||||
|
struct ArcInner<T: ?Sized> {
|
||||||
|
strong: usize,
|
||||||
|
weak: usize,
|
||||||
|
data: T,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Arc<T: ?Sized> {
|
||||||
|
inner: NonNull<ArcInner<T>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
|
||||||
|
|
||||||
|
const GOAL: usize = {
|
||||||
|
let x = transmute::<usize, Arc<[i32; 3]>>(12);
|
||||||
|
let y: Arc<[i32]> = x;
|
||||||
|
let z = transmute::<Arc<[i32]>, (usize, usize)>(y);
|
||||||
|
z.1
|
||||||
|
};
|
||||||
|
|
||||||
|
"#,
|
||||||
|
3,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn boxes() {
|
fn boxes() {
|
||||||
check_number(
|
check_number(
|
||||||
|
@ -1960,6 +2086,17 @@ fn array_and_index() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn string() {
|
||||||
|
check_str(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized, index, slice
|
||||||
|
const GOAL: &str = "hello";
|
||||||
|
"#,
|
||||||
|
"hello",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn byte_string() {
|
fn byte_string() {
|
||||||
check_number(
|
check_number(
|
||||||
|
@ -2018,6 +2155,57 @@ fn consts() {
|
||||||
"#,
|
"#,
|
||||||
6,
|
6,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
const F1: i32 = 2147483647;
|
||||||
|
const F2: i32 = F1 - 25;
|
||||||
|
const GOAL: i32 = F2;
|
||||||
|
"#,
|
||||||
|
2147483622,
|
||||||
|
);
|
||||||
|
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
const F1: i32 = -2147483648;
|
||||||
|
const F2: i32 = F1 + 18;
|
||||||
|
const GOAL: i32 = F2;
|
||||||
|
"#,
|
||||||
|
-2147483630,
|
||||||
|
);
|
||||||
|
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
const F1: i32 = 10;
|
||||||
|
const F2: i32 = F1 - 20;
|
||||||
|
const GOAL: i32 = F2;
|
||||||
|
"#,
|
||||||
|
-10,
|
||||||
|
);
|
||||||
|
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
const F1: i32 = 25;
|
||||||
|
const F2: i32 = F1 - 25;
|
||||||
|
const GOAL: i32 = F2;
|
||||||
|
"#,
|
||||||
|
0,
|
||||||
|
);
|
||||||
|
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
const A: i32 = -2147483648;
|
||||||
|
const GOAL: bool = A > 0;
|
||||||
|
"#,
|
||||||
|
0,
|
||||||
|
);
|
||||||
|
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
const GOAL: i64 = (-2147483648_i32) as i64;
|
||||||
|
"#,
|
||||||
|
-2147483648,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -2285,6 +2473,25 @@ fn const_trait_assoc() {
|
||||||
"#,
|
"#,
|
||||||
32,
|
32,
|
||||||
);
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- /a/lib.rs crate:a
|
||||||
|
pub trait ToConst {
|
||||||
|
const VAL: usize;
|
||||||
|
}
|
||||||
|
pub const fn to_const<T: ToConst>() -> usize {
|
||||||
|
T::VAL
|
||||||
|
}
|
||||||
|
//- /main.rs crate:main deps:a
|
||||||
|
use a::{ToConst, to_const};
|
||||||
|
struct U0;
|
||||||
|
impl ToConst for U0 {
|
||||||
|
const VAL: usize = 5;
|
||||||
|
}
|
||||||
|
const GOAL: usize = to_const::<U0>();
|
||||||
|
"#,
|
||||||
|
5,
|
||||||
|
);
|
||||||
check_number(
|
check_number(
|
||||||
r#"
|
r#"
|
||||||
struct S<T>(*mut T);
|
struct S<T>(*mut T);
|
||||||
|
@ -2311,21 +2518,11 @@ fn const_trait_assoc() {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn panic_messages() {
|
fn exec_limits() {
|
||||||
check_fail(
|
if skip_slow_tests() {
|
||||||
r#"
|
return;
|
||||||
//- minicore: panic
|
|
||||||
const GOAL: u8 = {
|
|
||||||
let x: u16 = 2;
|
|
||||||
panic!("hello");
|
|
||||||
};
|
|
||||||
"#,
|
|
||||||
|e| e == ConstEvalError::MirEvalError(MirEvalError::Panic("hello".to_string())),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn exec_limits() {
|
|
||||||
check_fail(
|
check_fail(
|
||||||
r#"
|
r#"
|
||||||
const GOAL: usize = loop {};
|
const GOAL: usize = loop {};
|
||||||
|
@ -2339,7 +2536,7 @@ fn exec_limits() {
|
||||||
}
|
}
|
||||||
const GOAL: i32 = f(0);
|
const GOAL: i32 = f(0);
|
||||||
"#,
|
"#,
|
||||||
|e| e == ConstEvalError::MirEvalError(MirEvalError::StackOverflow),
|
|e| e == ConstEvalError::MirEvalError(MirEvalError::ExecutionLimitExceeded),
|
||||||
);
|
);
|
||||||
// Reasonable code should still work
|
// Reasonable code should still work
|
||||||
check_number(
|
check_number(
|
||||||
|
@ -2362,6 +2559,28 @@ fn exec_limits() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn memory_limit() {
|
||||||
|
check_fail(
|
||||||
|
r#"
|
||||||
|
extern "Rust" {
|
||||||
|
#[rustc_allocator]
|
||||||
|
fn __rust_alloc(size: usize, align: usize) -> *mut u8;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: u8 = unsafe {
|
||||||
|
__rust_alloc(30_000_000_000, 1); // 30GB
|
||||||
|
2
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
|e| {
|
||||||
|
e == ConstEvalError::MirEvalError(MirEvalError::Panic(
|
||||||
|
"Memory allocation of 30000000000 bytes failed".to_string(),
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn type_error() {
|
fn type_error() {
|
||||||
check_fail(
|
check_fail(
|
||||||
|
@ -2376,6 +2595,37 @@ fn type_error() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn unsized_field() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized, index, slice, transmute
|
||||||
|
use core::mem::transmute;
|
||||||
|
|
||||||
|
struct Slice([u8]);
|
||||||
|
struct Slice2(Slice);
|
||||||
|
|
||||||
|
impl Slice2 {
|
||||||
|
fn as_inner(&self) -> &Slice {
|
||||||
|
&self.0
|
||||||
|
}
|
||||||
|
|
||||||
|
fn as_bytes(&self) -> &[u8] {
|
||||||
|
&self.as_inner().0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: u8 = unsafe {
|
||||||
|
let x: &[u8] = &[1, 2, 3];
|
||||||
|
let x: &Slice2 = transmute(x);
|
||||||
|
let x = x.as_bytes();
|
||||||
|
x[0] + x[1] + x[2]
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
6,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn unsized_local() {
|
fn unsized_local() {
|
||||||
check_fail(
|
check_fail(
|
||||||
|
|
|
@ -14,6 +14,171 @@ fn size_of() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn size_of_val() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
struct X(i32, u8);
|
||||||
|
|
||||||
|
const GOAL: usize = size_of_val(&X(1, 2));
|
||||||
|
"#,
|
||||||
|
8,
|
||||||
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: usize = {
|
||||||
|
let it: &[i32] = &[1, 2, 3];
|
||||||
|
size_of_val(it)
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
12,
|
||||||
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized, transmute
|
||||||
|
use core::mem::transmute;
|
||||||
|
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
struct X {
|
||||||
|
x: i64,
|
||||||
|
y: u8,
|
||||||
|
t: [i32],
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: usize = unsafe {
|
||||||
|
let y: &X = transmute([0usize, 3]);
|
||||||
|
size_of_val(y)
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
24,
|
||||||
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized, transmute
|
||||||
|
use core::mem::transmute;
|
||||||
|
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
struct X {
|
||||||
|
x: i32,
|
||||||
|
y: i64,
|
||||||
|
t: [u8],
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: usize = unsafe {
|
||||||
|
let y: &X = transmute([0usize, 15]);
|
||||||
|
size_of_val(y)
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
32,
|
||||||
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized, fmt, builtin_impls
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: usize = {
|
||||||
|
let x: &i16 = &5;
|
||||||
|
let y: &dyn core::fmt::Debug = x;
|
||||||
|
let z: &dyn core::fmt::Debug = &y;
|
||||||
|
size_of_val(x) + size_of_val(y) * 10 + size_of_val(z) * 100
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
1622,
|
||||||
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: usize = {
|
||||||
|
size_of_val("salam")
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
5,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn min_align_of_val() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
struct X(i32, u8);
|
||||||
|
|
||||||
|
const GOAL: usize = min_align_of_val(&X(1, 2));
|
||||||
|
"#,
|
||||||
|
4,
|
||||||
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: coerce_unsized
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: usize = {
|
||||||
|
let x: &[i32] = &[1, 2, 3];
|
||||||
|
min_align_of_val(x)
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
4,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn type_name() {
|
||||||
|
check_str(
|
||||||
|
r#"
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn type_name<T: ?Sized>() -> &'static str;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: &str = type_name::<i32>();
|
||||||
|
"#,
|
||||||
|
"i32",
|
||||||
|
);
|
||||||
|
check_str(
|
||||||
|
r#"
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn type_name<T: ?Sized>() -> &'static str;
|
||||||
|
}
|
||||||
|
|
||||||
|
mod mod1 {
|
||||||
|
pub mod mod2 {
|
||||||
|
pub struct Ty;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: &str = type_name::<mod1::mod2::Ty>();
|
||||||
|
"#,
|
||||||
|
"mod1::mod2::Ty",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn transmute() {
|
fn transmute() {
|
||||||
check_number(
|
check_number(
|
||||||
|
@ -28,10 +193,29 @@ fn transmute() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn read_via_copy() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn read_via_copy<T>(e: *const T) -> T;
|
||||||
|
pub fn volatile_load<T>(e: *const T) -> T;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: i32 = {
|
||||||
|
let x = 2;
|
||||||
|
read_via_copy(&x) + volatile_load(&x)
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
4,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn const_eval_select() {
|
fn const_eval_select() {
|
||||||
check_number(
|
check_number(
|
||||||
r#"
|
r#"
|
||||||
|
//- minicore: fn
|
||||||
extern "rust-intrinsic" {
|
extern "rust-intrinsic" {
|
||||||
pub fn const_eval_select<ARG, F, G, RET>(arg: ARG, called_in_const: F, called_at_rt: G) -> RET
|
pub fn const_eval_select<ARG, F, G, RET>(arg: ARG, called_in_const: F, called_at_rt: G) -> RET
|
||||||
where
|
where
|
||||||
|
@ -68,7 +252,7 @@ fn wrapping_add() {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn saturating_add() {
|
fn saturating() {
|
||||||
check_number(
|
check_number(
|
||||||
r#"
|
r#"
|
||||||
extern "rust-intrinsic" {
|
extern "rust-intrinsic" {
|
||||||
|
@ -79,6 +263,16 @@ fn saturating_add() {
|
||||||
"#,
|
"#,
|
||||||
255,
|
255,
|
||||||
);
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn saturating_sub<T>(a: T, b: T) -> T;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: bool = saturating_sub(5u8, 7) == 0 && saturating_sub(8u8, 4) == 4;
|
||||||
|
"#,
|
||||||
|
1,
|
||||||
|
);
|
||||||
check_number(
|
check_number(
|
||||||
r#"
|
r#"
|
||||||
extern "rust-intrinsic" {
|
extern "rust-intrinsic" {
|
||||||
|
@ -112,6 +306,7 @@ fn allocator() {
|
||||||
*ptr = 23;
|
*ptr = 23;
|
||||||
*ptr2 = 32;
|
*ptr2 = 32;
|
||||||
let ptr = __rust_realloc(ptr, 4, 1, 8);
|
let ptr = __rust_realloc(ptr, 4, 1, 8);
|
||||||
|
let ptr = __rust_realloc(ptr, 8, 1, 3);
|
||||||
let ptr2 = ((ptr as usize) + 1) as *mut u8;
|
let ptr2 = ((ptr as usize) + 1) as *mut u8;
|
||||||
*ptr + *ptr2
|
*ptr + *ptr2
|
||||||
};
|
};
|
||||||
|
@ -159,6 +354,24 @@ fn needs_drop() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn discriminant_value() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
//- minicore: discriminant, option
|
||||||
|
use core::marker::DiscriminantKind;
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn discriminant_value<T>(v: &T) -> <T as DiscriminantKind>::Discriminant;
|
||||||
|
}
|
||||||
|
const GOAL: bool = {
|
||||||
|
discriminant_value(&Some(2i32)) == discriminant_value(&Some(5i32))
|
||||||
|
&& discriminant_value(&Some(2i32)) != discriminant_value(&None::<i32>)
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
1,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn likely() {
|
fn likely() {
|
||||||
check_number(
|
check_number(
|
||||||
|
@ -327,6 +540,24 @@ fn copy_nonoverlapping() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn write_bytes() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: i32 = unsafe {
|
||||||
|
let mut x = 2;
|
||||||
|
write_bytes(&mut x, 5, 1);
|
||||||
|
x
|
||||||
|
};
|
||||||
|
"#,
|
||||||
|
0x05050505,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn copy() {
|
fn copy() {
|
||||||
check_number(
|
check_number(
|
||||||
|
@ -362,6 +593,20 @@ fn ctpop() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn ctlz() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn ctlz<T: Copy>(x: T) -> T;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: u8 = ctlz(0b0001_1100_u8);
|
||||||
|
"#,
|
||||||
|
3,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn cttz() {
|
fn cttz() {
|
||||||
check_number(
|
check_number(
|
||||||
|
@ -375,3 +620,85 @@ fn cttz() {
|
||||||
3,
|
3,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rotate() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn rotate_left<T: Copy>(x: T, y: T) -> T;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: i64 = rotate_left(0xaa00000000006e1i64, 12);
|
||||||
|
"#,
|
||||||
|
0x6e10aa,
|
||||||
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn rotate_right<T: Copy>(x: T, y: T) -> T;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: i64 = rotate_right(0x6e10aa, 12);
|
||||||
|
"#,
|
||||||
|
0xaa00000000006e1,
|
||||||
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn rotate_left<T: Copy>(x: T, y: T) -> T;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: i8 = rotate_left(129, 2);
|
||||||
|
"#,
|
||||||
|
6,
|
||||||
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
extern "rust-intrinsic" {
|
||||||
|
pub fn rotate_right<T: Copy>(x: T, y: T) -> T;
|
||||||
|
}
|
||||||
|
|
||||||
|
const GOAL: i32 = rotate_right(10006016, 1020315);
|
||||||
|
"#,
|
||||||
|
320192512,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn simd() {
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
pub struct i8x16(
|
||||||
|
i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,
|
||||||
|
);
|
||||||
|
extern "platform-intrinsic" {
|
||||||
|
pub fn simd_bitmask<T, U>(x: T) -> U;
|
||||||
|
}
|
||||||
|
const GOAL: u16 = simd_bitmask(i8x16(
|
||||||
|
0, 1, 0, 0, 2, 255, 100, 0, 50, 0, 1, 1, 0, 0, 0, 0
|
||||||
|
));
|
||||||
|
"#,
|
||||||
|
0b0000110101110010,
|
||||||
|
);
|
||||||
|
check_number(
|
||||||
|
r#"
|
||||||
|
pub struct i8x16(
|
||||||
|
i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,
|
||||||
|
);
|
||||||
|
extern "platform-intrinsic" {
|
||||||
|
pub fn simd_lt<T, U>(x: T, y: T) -> U;
|
||||||
|
pub fn simd_bitmask<T, U>(x: T) -> U;
|
||||||
|
}
|
||||||
|
const GOAL: u16 = simd_bitmask(simd_lt::<i8x16, i8x16>(
|
||||||
|
i8x16(
|
||||||
|
-105, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10
|
||||||
|
),
|
||||||
|
i8x16(
|
||||||
|
-4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11
|
||||||
|
),
|
||||||
|
));
|
||||||
|
"#,
|
||||||
|
0xFFFF,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
|
@ -110,6 +110,14 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
|
||||||
#[salsa::invoke(crate::layout::target_data_layout_query)]
|
#[salsa::invoke(crate::layout::target_data_layout_query)]
|
||||||
fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
|
fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
|
||||||
|
|
||||||
|
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
|
||||||
|
fn lookup_impl_method(
|
||||||
|
&self,
|
||||||
|
env: Arc<crate::TraitEnvironment>,
|
||||||
|
func: FunctionId,
|
||||||
|
fn_subst: Substitution,
|
||||||
|
) -> (FunctionId, Substitution);
|
||||||
|
|
||||||
#[salsa::invoke(crate::lower::callable_item_sig)]
|
#[salsa::invoke(crate::lower::callable_item_sig)]
|
||||||
fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
|
fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
|
||||||
|
|
||||||
|
|
|
@ -5,7 +5,7 @@ mod unsafe_check;
|
||||||
mod decl_check;
|
mod decl_check;
|
||||||
|
|
||||||
pub use crate::diagnostics::{
|
pub use crate::diagnostics::{
|
||||||
decl_check::{incorrect_case, IncorrectCase},
|
decl_check::{incorrect_case, CaseType, IncorrectCase},
|
||||||
expr::{
|
expr::{
|
||||||
record_literal_missing_fields, record_pattern_missing_fields, BodyValidationDiagnostic,
|
record_literal_missing_fields, record_pattern_missing_fields, BodyValidationDiagnostic,
|
||||||
},
|
},
|
||||||
|
|
|
@ -57,11 +57,11 @@ pub fn incorrect_case(
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum CaseType {
|
pub enum CaseType {
|
||||||
// `some_var`
|
/// `some_var`
|
||||||
LowerSnakeCase,
|
LowerSnakeCase,
|
||||||
// `SOME_CONST`
|
/// `SOME_CONST`
|
||||||
UpperSnakeCase,
|
UpperSnakeCase,
|
||||||
// `SomeStruct`
|
/// `SomeStruct`
|
||||||
UpperCamelCase,
|
UpperCamelCase,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -181,6 +181,7 @@ impl<'a> DeclValidator<'a> {
|
||||||
AttrDefId::TraitAliasId(taid) => Some(taid.lookup(self.db.upcast()).container.into()),
|
AttrDefId::TraitAliasId(taid) => Some(taid.lookup(self.db.upcast()).container.into()),
|
||||||
AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
|
AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
|
||||||
AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
|
AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
|
||||||
|
AttrDefId::ExternCrateId(id) => Some(id.lookup(self.db.upcast()).container.into()),
|
||||||
// These warnings should not explore macro definitions at all
|
// These warnings should not explore macro definitions at all
|
||||||
AttrDefId::MacroId(_) => None,
|
AttrDefId::MacroId(_) => None,
|
||||||
AttrDefId::AdtId(aid) => match aid {
|
AttrDefId::AdtId(aid) => match aid {
|
||||||
|
|
|
@ -192,7 +192,7 @@ pub trait HirDisplay {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> HirFormatter<'a> {
|
impl HirFormatter<'_> {
|
||||||
pub fn write_joined<T: HirDisplay>(
|
pub fn write_joined<T: HirDisplay>(
|
||||||
&mut self,
|
&mut self,
|
||||||
iter: impl IntoIterator<Item = T>,
|
iter: impl IntoIterator<Item = T>,
|
||||||
|
@ -342,7 +342,7 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
|
impl<T> fmt::Display for HirDisplayWrapper<'_, T>
|
||||||
where
|
where
|
||||||
T: HirDisplay,
|
T: HirDisplay,
|
||||||
{
|
{
|
||||||
|
@ -360,7 +360,7 @@ where
|
||||||
|
|
||||||
const TYPE_HINT_TRUNCATION: &str = "…";
|
const TYPE_HINT_TRUNCATION: &str = "…";
|
||||||
|
|
||||||
impl<T: HirDisplay> HirDisplay for &'_ T {
|
impl<T: HirDisplay> HirDisplay for &T {
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||||
HirDisplay::hir_fmt(*self, f)
|
HirDisplay::hir_fmt(*self, f)
|
||||||
}
|
}
|
||||||
|
@ -446,28 +446,6 @@ impl HirDisplay for Const {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct HexifiedConst(pub Const);
|
|
||||||
|
|
||||||
impl HirDisplay for HexifiedConst {
|
|
||||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
|
||||||
let data = &self.0.data(Interner);
|
|
||||||
if let TyKind::Scalar(s) = data.ty.kind(Interner) {
|
|
||||||
if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
|
|
||||||
if let ConstValue::Concrete(c) = &data.value {
|
|
||||||
if let ConstScalar::Bytes(b, m) = &c.interned {
|
|
||||||
let value = u128::from_le_bytes(pad16(b, false));
|
|
||||||
if value >= 10 {
|
|
||||||
render_const_scalar(f, &b, m, &data.ty)?;
|
|
||||||
return write!(f, " ({:#X})", value);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
self.0.hir_fmt(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn render_const_scalar(
|
fn render_const_scalar(
|
||||||
f: &mut HirFormatter<'_>,
|
f: &mut HirFormatter<'_>,
|
||||||
b: &[u8],
|
b: &[u8],
|
||||||
|
@ -481,28 +459,28 @@ fn render_const_scalar(
|
||||||
TyKind::Scalar(s) => match s {
|
TyKind::Scalar(s) => match s {
|
||||||
Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
|
Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
|
||||||
Scalar::Char => {
|
Scalar::Char => {
|
||||||
let x = u128::from_le_bytes(pad16(b, false)) as u32;
|
let it = u128::from_le_bytes(pad16(b, false)) as u32;
|
||||||
let Ok(c) = char::try_from(x) else {
|
let Ok(c) = char::try_from(it) else {
|
||||||
return f.write_str("<unicode-error>");
|
return f.write_str("<unicode-error>");
|
||||||
};
|
};
|
||||||
write!(f, "{c:?}")
|
write!(f, "{c:?}")
|
||||||
}
|
}
|
||||||
Scalar::Int(_) => {
|
Scalar::Int(_) => {
|
||||||
let x = i128::from_le_bytes(pad16(b, true));
|
let it = i128::from_le_bytes(pad16(b, true));
|
||||||
write!(f, "{x}")
|
write!(f, "{it}")
|
||||||
}
|
}
|
||||||
Scalar::Uint(_) => {
|
Scalar::Uint(_) => {
|
||||||
let x = u128::from_le_bytes(pad16(b, false));
|
let it = u128::from_le_bytes(pad16(b, false));
|
||||||
write!(f, "{x}")
|
write!(f, "{it}")
|
||||||
}
|
}
|
||||||
Scalar::Float(fl) => match fl {
|
Scalar::Float(fl) => match fl {
|
||||||
chalk_ir::FloatTy::F32 => {
|
chalk_ir::FloatTy::F32 => {
|
||||||
let x = f32::from_le_bytes(b.try_into().unwrap());
|
let it = f32::from_le_bytes(b.try_into().unwrap());
|
||||||
write!(f, "{x:?}")
|
write!(f, "{it:?}")
|
||||||
}
|
}
|
||||||
chalk_ir::FloatTy::F64 => {
|
chalk_ir::FloatTy::F64 => {
|
||||||
let x = f64::from_le_bytes(b.try_into().unwrap());
|
let it = f64::from_le_bytes(b.try_into().unwrap());
|
||||||
write!(f, "{x:?}")
|
write!(f, "{it:?}")
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
@ -636,7 +614,8 @@ fn render_const_scalar(
|
||||||
}
|
}
|
||||||
hir_def::AdtId::EnumId(e) => {
|
hir_def::AdtId::EnumId(e) => {
|
||||||
let Some((var_id, var_layout)) =
|
let Some((var_id, var_layout)) =
|
||||||
detect_variant_from_bytes(&layout, f.db, krate, b, e) else {
|
detect_variant_from_bytes(&layout, f.db, krate, b, e)
|
||||||
|
else {
|
||||||
return f.write_str("<failed-to-detect-variant>");
|
return f.write_str("<failed-to-detect-variant>");
|
||||||
};
|
};
|
||||||
let data = &f.db.enum_data(e).variants[var_id];
|
let data = &f.db.enum_data(e).variants[var_id];
|
||||||
@@ -658,8 +637,8 @@ fn render_const_scalar(
         }
         TyKind::FnDef(..) => ty.hir_fmt(f),
         TyKind::Function(_) | TyKind::Raw(_, _) => {
-            let x = u128::from_le_bytes(pad16(b, false));
-            write!(f, "{:#X} as ", x)?;
+            let it = u128::from_le_bytes(pad16(b, false));
+            write!(f, "{:#X} as ", it)?;
             ty.hir_fmt(f)
         }
         TyKind::Array(ty, len) => {
@@ -735,7 +714,7 @@ fn render_variant_after_name(
            }
            write!(f, " }}")?;
        } else {
-            let mut it = it.map(|x| x.0);
+            let mut it = it.map(|it| it.0);
            write!(f, "(")?;
            if let Some(id) = it.next() {
                render_field(f, id)?;
@@ -1277,19 +1256,20 @@ fn hir_fmt_generics(
        i: usize,
        parameters: &Substitution,
    ) -> bool {
-        if parameter.ty(Interner).map(|x| x.kind(Interner)) == Some(&TyKind::Error)
+        if parameter.ty(Interner).map(|it| it.kind(Interner))
+            == Some(&TyKind::Error)
        {
            return true;
        }
        if let Some(ConstValue::Concrete(c)) =
-            parameter.constant(Interner).map(|x| &x.data(Interner).value)
+            parameter.constant(Interner).map(|it| &it.data(Interner).value)
        {
            if c.interned == ConstScalar::Unknown {
                return true;
            }
        }
        let default_parameter = match default_parameters.get(i) {
-            Some(x) => x,
+            Some(it) => it,
            None => return true,
        };
        let actual_default =

@@ -290,7 +290,7 @@ impl Default for InternedStandardTypes {
    /// ```
    ///
    /// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
-    /// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
+    /// E.g., `struct Foo<T> { it: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
    /// The autoderef and -ref are the same as in the above example, but the type
    /// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
    /// the underlying conversions from `[i32; 4]` to `[i32]`.
@@ -1172,7 +1172,7 @@ impl<'a> InferenceContext<'a> {
        unresolved: Option<usize>,
        path: &ModPath,
    ) -> (Ty, Option<VariantId>) {
-        let remaining = unresolved.map(|x| path.segments()[x..].len()).filter(|x| x > &0);
+        let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0);
        match remaining {
            None => {
                let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id {
@@ -1232,7 +1232,9 @@ impl<'a> InferenceContext<'a> {
            .as_function()?
            .lookup(self.db.upcast())
            .container
-        else { return None };
+        else {
+            return None;
+        };
        self.resolve_output_on(trait_)
    }

@@ -1322,7 +1324,7 @@ impl Expectation {
    /// The primary use case is where the expected type is a fat pointer,
    /// like `&[isize]`. For example, consider the following statement:
    ///
-    ///    let x: &[isize] = &[1, 2, 3];
+    ///    let it: &[isize] = &[1, 2, 3];
    ///
    /// In this case, the expected type for the `&[1, 2, 3]` expression is
    /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the

@@ -139,7 +139,7 @@ impl HirPlace {
    ) -> CaptureKind {
        match current_capture {
            CaptureKind::ByRef(BorrowKind::Mut { .. }) => {
-                if self.projections[len..].iter().any(|x| *x == ProjectionElem::Deref) {
+                if self.projections[len..].iter().any(|it| *it == ProjectionElem::Deref) {
                    current_capture = CaptureKind::ByRef(BorrowKind::Unique);
                }
            }
@@ -199,7 +199,7 @@ impl CapturedItem {
                    .to_string(),
                VariantData::Tuple(fields) => fields
                    .iter()
-                    .position(|x| x.0 == f.local_id)
+                    .position(|it| it.0 == f.local_id)
                    .unwrap_or_default()
                    .to_string(),
                VariantData::Unit => "[missing field]".to_string(),
@@ -439,10 +439,10 @@ impl InferenceContext<'_> {
    }

    fn walk_expr(&mut self, tgt_expr: ExprId) {
-        if let Some(x) = self.result.expr_adjustments.get_mut(&tgt_expr) {
+        if let Some(it) = self.result.expr_adjustments.get_mut(&tgt_expr) {
            // FIXME: this take is completely unneeded, and just is here to make borrow checker
            // happy. Remove it if you can.
-            let x_taken = mem::take(x);
+            let x_taken = mem::take(it);
            self.walk_expr_with_adjust(tgt_expr, &x_taken);
            *self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken;
        } else {
@@ -536,7 +536,7 @@ impl InferenceContext<'_> {
                if let &Some(expr) = spread {
                    self.consume_expr(expr);
                }
-                self.consume_exprs(fields.iter().map(|x| x.expr));
+                self.consume_exprs(fields.iter().map(|it| it.expr));
            }
            Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
            Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
@@ -548,7 +548,7 @@ impl InferenceContext<'_> {
                } else if let Some((f, _)) = self.result.method_resolution(tgt_expr) {
                    let mutability = 'b: {
                        if let Some(deref_trait) =
-                            self.resolve_lang_item(LangItem::DerefMut).and_then(|x| x.as_trait())
+                            self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
                        {
                            if let Some(deref_fn) =
                                self.db.trait_data(deref_trait).method_by_name(&name![deref_mut])
@@ -615,8 +615,8 @@ impl InferenceContext<'_> {
            "We sort closures, so we should always have data for inner closures",
        );
        let mut cc = mem::take(&mut self.current_captures);
-        cc.extend(captures.iter().filter(|x| self.is_upvar(&x.place)).map(|x| {
-            CapturedItemWithoutTy { place: x.place.clone(), kind: x.kind, span: x.span }
+        cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
+            CapturedItemWithoutTy { place: it.place.clone(), kind: it.kind, span: it.span }
        }));
        self.current_captures = cc;
    }
@@ -694,7 +694,7 @@ impl InferenceContext<'_> {
                },
            },
        }
-        if self.result.pat_adjustments.get(&p).map_or(false, |x| !x.is_empty()) {
+        if self.result.pat_adjustments.get(&p).map_or(false, |it| !it.is_empty()) {
            for_mut = BorrowKind::Unique;
        }
        self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
@@ -706,9 +706,9 @@ impl InferenceContext<'_> {

    fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
        let mut ty = None;
-        if let Some(x) = self.result.expr_adjustments.get(&e) {
-            if let Some(x) = x.last() {
-                ty = Some(x.target.clone());
+        if let Some(it) = self.result.expr_adjustments.get(&e) {
+            if let Some(it) = it.last() {
+                ty = Some(it.target.clone());
            }
        }
        ty.unwrap_or_else(|| self.expr_ty(e))
@@ -727,7 +727,7 @@ impl InferenceContext<'_> {
            // FIXME: We handle closure as a special case, since chalk consider every closure as copy. We
            // should probably let chalk know which closures are copy, but I don't know how doing it
            // without creating query cycles.
-            return self.result.closure_info.get(id).map(|x| x.1 == FnTrait::Fn).unwrap_or(true);
+            return self.result.closure_info.get(id).map(|it| it.1 == FnTrait::Fn).unwrap_or(true);
        }
        self.table.resolve_completely(ty).is_copy(self.db, self.owner)
    }
@@ -748,7 +748,7 @@ impl InferenceContext<'_> {
    }

    fn minimize_captures(&mut self) {
-        self.current_captures.sort_by_key(|x| x.place.projections.len());
+        self.current_captures.sort_by_key(|it| it.place.projections.len());
        let mut hash_map = HashMap::<HirPlace, usize>::new();
        let result = mem::take(&mut self.current_captures);
        for item in result {
@@ -759,7 +759,7 @@ impl InferenceContext<'_> {
                    break Some(*k);
                }
                match it.next() {
-                    Some(x) => lookup_place.projections.push(x.clone()),
+                    Some(it) => lookup_place.projections.push(it.clone()),
                    None => break None,
                }
            };
@@ -780,7 +780,7 @@ impl InferenceContext<'_> {
    }

    fn consume_with_pat(&mut self, mut place: HirPlace, pat: PatId) {
-        let cnt = self.result.pat_adjustments.get(&pat).map(|x| x.len()).unwrap_or_default();
+        let cnt = self.result.pat_adjustments.get(&pat).map(|it| it.len()).unwrap_or_default();
        place.projections = place
            .projections
            .iter()
@@ -894,10 +894,10 @@ impl InferenceContext<'_> {

    fn closure_kind(&self) -> FnTrait {
        let mut r = FnTrait::Fn;
-        for x in &self.current_captures {
+        for it in &self.current_captures {
            r = cmp::min(
                r,
-                match &x.kind {
+                match &it.kind {
                    CaptureKind::ByRef(BorrowKind::Unique | BorrowKind::Mut { .. }) => {
                        FnTrait::FnMut
                    }
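
`closure_kind` folds the capture kinds with `cmp::min`, which relies on `FnTrait`'s variant order putting `FnOnce` below `FnMut` below `Fn`: a single by-value capture degrades the whole closure. A self-contained toy version of that fold (the enum here is a stand-in for illustration, not the real rust-analyzer type):

    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    enum FnTrait {
        FnOnce, // most restrictive: some capture is moved in
        FnMut,  // some capture is mutated
        Fn,     // captures are only read
    }

    fn closure_kind(captures: &[FnTrait]) -> FnTrait {
        // derive(Ord) orders by declaration, so min() picks the most
        // restrictive requirement among all captures.
        captures.iter().copied().fold(FnTrait::Fn, std::cmp::min)
    }

    fn main() {
        assert_eq!(closure_kind(&[FnTrait::Fn, FnTrait::FnMut]), FnTrait::FnMut);
        assert_eq!(closure_kind(&[]), FnTrait::Fn); // no captures at all
    }
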
@@ -933,7 +933,7 @@ impl InferenceContext<'_> {
        }
        self.minimize_captures();
        let result = mem::take(&mut self.current_captures);
-        let captures = result.into_iter().map(|x| x.with_ty(self)).collect::<Vec<_>>();
+        let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
        self.result.closure_info.insert(closure, (captures, closure_kind));
        closure_kind
    }
@@ -973,20 +973,20 @@ impl InferenceContext<'_> {
    fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>)> {
        let mut deferred_closures = mem::take(&mut self.deferred_closures);
        let mut dependents_count: FxHashMap<ClosureId, usize> =
-            deferred_closures.keys().map(|x| (*x, 0)).collect();
+            deferred_closures.keys().map(|it| (*it, 0)).collect();
        for (_, deps) in &self.closure_dependencies {
            for dep in deps {
                *dependents_count.entry(*dep).or_default() += 1;
            }
        }
        let mut queue: Vec<_> =
-            deferred_closures.keys().copied().filter(|x| dependents_count[x] == 0).collect();
+            deferred_closures.keys().copied().filter(|it| dependents_count[it] == 0).collect();
        let mut result = vec![];
-        while let Some(x) = queue.pop() {
-            if let Some(d) = deferred_closures.remove(&x) {
-                result.push((x, d));
+        while let Some(it) = queue.pop() {
+            if let Some(d) = deferred_closures.remove(&it) {
+                result.push((it, d));
            }
-            for dep in self.closure_dependencies.get(&x).into_iter().flat_map(|x| x.iter()) {
+            for dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) {
                let cnt = dependents_count.get_mut(dep).unwrap();
                *cnt -= 1;
                if *cnt == 0 {

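`sort_closures` is a Kahn-style topological sort over the closure dependency graph: count dependents, seed the queue with closures nobody depends on, and release a closure once its counter drops to zero. The same scheme over plain integers, as a runnable sketch:

    use std::collections::HashMap;

    /// Kahn's algorithm. `deps[&n]` lists the nodes that can only be
    /// processed after `n`, mirroring `closure_dependencies`.
    fn topo_sort(nodes: &[u32], deps: &HashMap<u32, Vec<u32>>) -> Vec<u32> {
        let mut dependents_count: HashMap<u32, usize> =
            nodes.iter().map(|&n| (n, 0)).collect();
        for targets in deps.values() {
            for &t in targets {
                *dependents_count.entry(t).or_default() += 1;
            }
        }
        let mut queue: Vec<u32> =
            nodes.iter().copied().filter(|n| dependents_count[n] == 0).collect();
        let mut result = vec![];
        while let Some(n) = queue.pop() {
            result.push(n);
            for &t in deps.get(&n).into_iter().flatten() {
                let cnt = dependents_count.get_mut(&t).unwrap();
                *cnt -= 1;
                if *cnt == 0 {
                    queue.push(t);
                }
            }
        }
        result
    }

    fn main() {
        let deps = HashMap::from([(1, vec![2]), (2, vec![3])]);
        assert_eq!(topo_sort(&[1, 2, 3], &deps), vec![1, 2, 3]);
    }
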
@@ -220,7 +220,7 @@ pub(crate) fn coerce(
    Ok((adjustments, table.resolve_with_fallback(ty, &fallback)))
}

-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
    /// Unify two types, but may coerce the first one to the second one
    /// using "implicit coercion rules" if needed.
    pub(super) fn coerce(
@@ -239,7 +239,7 @@ impl<'a> InferenceContext<'a> {
    }
}

-impl<'a> InferenceTable<'a> {
+impl InferenceTable<'_> {
    /// Unify two types, but may coerce the first one to the second one
    /// using "implicit coercion rules" if needed.
    pub(crate) fn coerce(
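
The `impl<'a> T<'a>` → `impl T<'_>` rewrite in this hunk repeats across the whole commit: when a lifetime parameter is declared on the impl but never referred to by name inside it, the anonymous `'_` form expresses the same thing with less ceremony. A minimal before/after on a made-up type:

    struct Wrapper<'a>(&'a str);

    // Before: `'a` is declared yet never named in the body.
    // impl<'a> Wrapper<'a> {
    //     fn len(&self) -> usize { self.0.len() }
    // }

    // After: the elided form this commit switches to.
    impl Wrapper<'_> {
        fn len(&self) -> usize {
            self.0.len()
        }
    }
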
@@ -377,7 +377,7 @@ impl<'a> InferenceTable<'a> {

        let snapshot = self.snapshot();

-        let mut autoderef = Autoderef::new(self, from_ty.clone());
+        let mut autoderef = Autoderef::new(self, from_ty.clone(), false);
        let mut first_error = None;
        let mut found = None;

@@ -50,7 +50,7 @@ use super::{
    InferenceContext, InferenceDiagnostic, TypeMismatch,
};

-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
    pub(crate) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
        let ty = self.infer_expr_inner(tgt_expr, expected);
        if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
@@ -316,7 +316,7 @@ impl<'a> InferenceContext<'a> {
            }
            Expr::Call { callee, args, .. } => {
                let callee_ty = self.infer_expr(*callee, &Expectation::none());
-                let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
+                let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false);
                let (res, derefed_callee) = 'b: {
                    // manual loop to be able to access `derefs.table`
                    while let Some((callee_deref_ty, _)) = derefs.next() {
@@ -928,7 +928,7 @@ impl<'a> InferenceContext<'a> {
        if let TyKind::Ref(Mutability::Mut, _, inner) = derefed_callee.kind(Interner) {
            if adjustments
                .last()
-                .map(|x| matches!(x.kind, Adjust::Borrow(_)))
+                .map(|it| matches!(it.kind, Adjust::Borrow(_)))
                .unwrap_or(true)
            {
                // prefer reborrow to move
@@ -1385,7 +1385,7 @@ impl<'a> InferenceContext<'a> {
        receiver_ty: &Ty,
        name: &Name,
    ) -> Option<(Ty, Option<FieldId>, Vec<Adjustment>, bool)> {
-        let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone());
+        let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone(), false);
        let mut private_field = None;
        let res = autoderef.by_ref().find_map(|(derefed_ty, _)| {
            let (field_id, parameters) = match derefed_ty.kind(Interner) {
@@ -1449,6 +1449,13 @@ impl<'a> InferenceContext<'a> {

    fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty {
        let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());

+        if name.is_missing() {
+            // Bail out early, don't even try to look up field. Also, we don't issue an unresolved
+            // field diagnostic because this is a syntax error rather than a semantic error.
+            return self.err_ty();
+        }
+
        match self.lookup_field(&receiver_ty, name) {
            Some((ty, field_id, adjustments, is_public)) => {
                self.write_expr_adj(receiver, adjustments);

@@ -12,7 +12,7 @@ use crate::{lower::lower_to_chalk_mutability, Adjust, Adjustment, AutoBorrow, Ov

use super::InferenceContext;

-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
    pub(crate) fn infer_mut_body(&mut self) {
        self.infer_mut_expr(self.body.body_expr, Mutability::Not);
    }
@@ -73,12 +73,12 @@ impl<'a> InferenceContext<'a> {
                self.infer_mut_expr(c, Mutability::Not);
                self.infer_mut_expr(body, Mutability::Not);
            }
-            Expr::MethodCall { receiver: x, method_name: _, args, generic_args: _ }
-            | Expr::Call { callee: x, args, is_assignee_expr: _ } => {
-                self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*x)));
+            Expr::MethodCall { receiver: it, method_name: _, args, generic_args: _ }
+            | Expr::Call { callee: it, args, is_assignee_expr: _ } => {
+                self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*it)));
            }
            Expr::Match { expr, arms } => {
-                let m = self.pat_iter_bound_mutability(arms.iter().map(|x| x.pat));
+                let m = self.pat_iter_bound_mutability(arms.iter().map(|it| it.pat));
                self.infer_mut_expr(*expr, m);
                for arm in arms.iter() {
                    self.infer_mut_expr(arm.expr, Mutability::Not);
@@ -96,7 +96,7 @@ impl<'a> InferenceContext<'a> {
                }
            }
            Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => {
-                self.infer_mut_not_expr_iter(fields.iter().map(|x| x.expr).chain(*spread))
+                self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
            }
            &Expr::Index { base, index } => {
                if mutability == Mutability::Mut {
@@ -204,8 +204,8 @@ impl<'a> InferenceContext<'a> {
    }

    /// Checks if the pat contains a `ref mut` binding. Such paths makes the context of bounded expressions
-    /// mutable. For example in `let (ref mut x0, ref x1) = *x;` we need to use `DerefMut` for `*x` but in
-    /// `let (ref x0, ref x1) = *x;` we should use `Deref`.
+    /// mutable. For example in `let (ref mut x0, ref x1) = *it;` we need to use `DerefMut` for `*it` but in
+    /// `let (ref x0, ref x1) = *it;` we should use `Deref`.
    fn pat_bound_mutability(&self, pat: PatId) -> Mutability {
        let mut r = Mutability::Not;
        self.body.walk_bindings_in_pat(pat, |b| {

@@ -56,7 +56,7 @@ impl PatLike for PatId {
    }
}

-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
    /// Infers type for tuple struct pattern or its corresponding assignee expression.
    ///
    /// Ellipses found in the original pattern or expression must be filtered out.
@@ -306,7 +306,7 @@ impl<'a> InferenceContext<'a> {
        self.result
            .pat_adjustments
            .get(&pat)
-            .and_then(|x| x.first())
+            .and_then(|it| it.first())
            .unwrap_or(&self.result.type_of_pat[pat])
            .clone()
    }

@@ -22,7 +22,7 @@ use crate::{
    TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
};

-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
    pub(super) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
        &mut self,
        t: T,
@@ -91,7 +91,7 @@ pub(crate) fn unify(
    let mut table = InferenceTable::new(db, env);
    let vars = Substitution::from_iter(
        Interner,
-        tys.binders.iter(Interner).map(|x| match &x.kind {
+        tys.binders.iter(Interner).map(|it| match &it.kind {
            chalk_ir::VariableKind::Ty(_) => {
                GenericArgData::Ty(table.new_type_var()).intern(Interner)
            }
@@ -547,7 +547,7 @@ impl<'a> InferenceTable<'a> {
        table: &'a mut InferenceTable<'b>,
        highest_known_var: InferenceVar,
    }
-    impl<'a, 'b> TypeFolder<Interner> for VarFudger<'a, 'b> {
+    impl TypeFolder<Interner> for VarFudger<'_, '_> {
        fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
            self
        }
@@ -686,8 +686,8 @@ impl<'a> InferenceTable<'a> {

        let mut arg_tys = vec![];
        let arg_ty = TyBuilder::tuple(num_args)
-            .fill(|x| {
-                let arg = match x {
+            .fill(|it| {
+                let arg = match it {
                    ParamKind::Type => self.new_type_var(),
                    ParamKind::Const(ty) => {
                        never!("Tuple with const parameter");
@@ -753,7 +753,7 @@ impl<'a> InferenceTable<'a> {
    {
        fold_tys_and_consts(
            ty,
-            |x, _| match x {
+            |it, _| match it {
                Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)),
                Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)),
            },
@@ -798,7 +798,7 @@ impl<'a> InferenceTable<'a> {
    }
}

-impl<'a> fmt::Debug for InferenceTable<'a> {
+impl fmt::Debug for InferenceTable<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("InferenceTable").field("num_vars", &self.type_variable_table.len()).finish()
    }
@@ -826,7 +826,7 @@ mod resolve {
        pub(super) var_stack: &'a mut Vec<InferenceVar>,
        pub(super) fallback: F,
    }
-    impl<'a, 'b, F> TypeFolder<Interner> for Resolver<'a, 'b, F>
+    impl<F> TypeFolder<Interner> for Resolver<'_, '_, F>
    where
        F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
    {

@@ -7,7 +7,7 @@ use hir_def::{
        Abi, FieldsShape, Integer, LayoutCalculator, LayoutS, Primitive, ReprOptions, Scalar, Size,
        StructKind, TargetDataLayout, WrappingRange,
    },
-    LocalEnumVariantId, LocalFieldId,
+    LocalEnumVariantId, LocalFieldId, StructId,
};
use la_arena::{Idx, RawIdx};
use stdx::never;
@@ -24,8 +24,8 @@ pub use self::{
};

macro_rules! user_error {
-    ($x: expr) => {
-        return Err(LayoutError::UserError(format!($x)))
+    ($it: expr) => {
+        return Err(LayoutError::UserError(format!($it)))
    };
}

@@ -77,18 +77,101 @@ impl<'a> LayoutCalculator for LayoutCx<'a> {
    }
}

+// FIXME: move this to the `rustc_abi`.
+fn layout_of_simd_ty(
+    db: &dyn HirDatabase,
+    id: StructId,
+    subst: &Substitution,
+    krate: CrateId,
+    dl: &TargetDataLayout,
+) -> Result<Arc<Layout>, LayoutError> {
+    let fields = db.field_types(id.into());
+
+    // Supported SIMD vectors are homogeneous ADTs with at least one field:
+    //
+    // * #[repr(simd)] struct S(T, T, T, T);
+    // * #[repr(simd)] struct S { it: T, y: T, z: T, w: T }
+    // * #[repr(simd)] struct S([T; 4])
+    //
+    // where T is a primitive scalar (integer/float/pointer).
+
+    let f0_ty = match fields.iter().next() {
+        Some(it) => it.1.clone().substitute(Interner, subst),
+        None => {
+            user_error!("simd type with zero fields");
+        }
+    };
+
+    // The element type and number of elements of the SIMD vector
+    // are obtained from:
+    //
+    // * the element type and length of the single array field, if
+    // the first field is of array type, or
+    //
+    // * the homogeneous field type and the number of fields.
+    let (e_ty, e_len, is_array) = if let TyKind::Array(e_ty, _) = f0_ty.kind(Interner) {
+        // Extract the number of elements from the layout of the array field:
+        let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), krate)?.fields else {
+            user_error!("Array with non array layout");
+        };
+
+        (e_ty.clone(), count, true)
+    } else {
+        // First ADT field is not an array:
+        (f0_ty, fields.iter().count() as u64, false)
+    };
+
+    // Compute the ABI of the element type:
+    let e_ly = db.layout_of_ty(e_ty, krate)?;
+    let Abi::Scalar(e_abi) = e_ly.abi else {
+        user_error!("simd type with inner non scalar type");
+    };
+
+    // Compute the size and alignment of the vector:
+    let size = e_ly.size.checked_mul(e_len, dl).ok_or(LayoutError::SizeOverflow)?;
+    let align = dl.vector_align(size);
+    let size = size.align_to(align.abi);
+
+    // Compute the placement of the vector fields:
+    let fields = if is_array {
+        FieldsShape::Arbitrary { offsets: [Size::ZERO].into(), memory_index: [0].into() }
+    } else {
+        FieldsShape::Array { stride: e_ly.size, count: e_len }
+    };
+
+    Ok(Arc::new(Layout {
+        variants: Variants::Single { index: struct_variant_idx() },
+        fields,
+        abi: Abi::Vector { element: e_abi, count: e_len },
+        largest_niche: e_ly.largest_niche,
+        size,
+        align,
+    }))
+}
+
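The arithmetic in `layout_of_simd_ty` is easy to check by hand for the two-lane `i64` vector that the new `simd_types` test pins down below: 2 lanes x 8 bytes = 16 bytes, and on the common 64-bit targets the vector alignment for a 16-byte vector is 16, so aligning the size changes nothing. A sketch of the same calculation (the alignment value is an assumption about the target, not taken from this diff):

    fn main() {
        // #[repr(simd)] struct SimdType(i64, i64);
        let (e_size, e_len) = (8u64, 2u64); // i64 element, two scalar fields
        let size = e_size * e_len; // the real code uses checked_mul -> 16
        let align = 16u64; // assumed: dl.vector_align(16).abi == 16
        let size = (size + align - 1) / align * align; // size.align_to(align)
        assert_eq!((size, align), (16, 16)); // matches the simd_types test
    }
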
 pub fn layout_of_ty_query(
     db: &dyn HirDatabase,
     ty: Ty,
     krate: CrateId,
 ) -> Result<Arc<Layout>, LayoutError> {
-    let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
+    let Some(target) = db.target_data_layout(krate) else {
+        return Err(LayoutError::TargetLayoutNotAvailable);
+    };
     let cx = LayoutCx { krate, target: &target };
     let dl = &*cx.current_data_layout();
     let trait_env = Arc::new(TraitEnvironment::empty(krate));
     let ty = normalize(db, trait_env, ty.clone());
     let result = match ty.kind(Interner) {
-        TyKind::Adt(AdtId(def), subst) => return db.layout_of_adt(*def, subst.clone(), krate),
+        TyKind::Adt(AdtId(def), subst) => {
+            if let hir_def::AdtId::StructId(s) = def {
+                let data = db.struct_data(*s);
+                let repr = data.repr.unwrap_or_default();
+                if repr.simd() {
+                    return layout_of_simd_ty(db, *s, subst, krate, &target);
+                }
+            };
+            return db.layout_of_adt(*def, subst.clone(), krate);
+        }
        TyKind::Scalar(s) => match s {
            chalk_ir::Scalar::Bool => Layout::scalar(
                dl,
@@ -147,7 +230,7 @@ pub fn layout_of_ty_query(
                .iter(Interner)
                .map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), krate))
                .collect::<Result<Vec<_>, _>>()?;
-            let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
+            let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
            let fields = fields.iter().collect::<Vec<_>>();
            cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
        }
@@ -265,14 +348,14 @@ pub fn layout_of_ty_query(
            let (captures, _) = infer.closure_info(c);
            let fields = captures
                .iter()
-                .map(|x| {
+                .map(|it| {
                    db.layout_of_ty(
-                        x.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
+                        it.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
                        krate,
                    )
                })
                .collect::<Result<Vec<_>, _>>()?;
-            let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
+            let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
            let fields = fields.iter().collect::<Vec<_>>();
            cx.univariant(dl, &fields, &ReprOptions::default(), StructKind::AlwaysSized)
                .ok_or(LayoutError::Unknown)?
@@ -315,7 +398,10 @@ fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty {
        let data = db.struct_data(*i);
        let mut it = data.variant_data.fields().iter().rev();
        match it.next() {
-            Some((f, _)) => field_ty(db, (*i).into(), f, subst),
+            Some((f, _)) => {
+                let last_field_ty = field_ty(db, (*i).into(), f, subst);
+                struct_tail_erasing_lifetimes(db, last_field_ty)
+            }
            None => pointee,
        }
    }

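The recursion added to `struct_tail_erasing_lifetimes` matters because the last field of a struct can itself be a struct whose tail is unsized; a single peel would stop at the inner struct instead of reaching `str`. The new `unsized_ref` test below exercises exactly this shape; the same situation in plain Rust, where the reference is only a fat pointer once the tail is known:

    #![allow(dead_code)]

    // S4's tail is found by walking last fields: S4 -> S3 -> str.
    struct S3(i32, str); // unsized tail: str
    struct S4(u64, S3);  // tail is S3, so recurse into it

    fn main() {
        // &S4 is a fat pointer (data pointer + string-length metadata).
        assert_eq!(
            std::mem::size_of::<&S4>(),
            std::mem::size_of::<usize>() * 2,
        );
    }
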
@@ -31,7 +31,9 @@ pub fn layout_of_adt_query(
    subst: Substitution,
    krate: CrateId,
) -> Result<Arc<Layout>, LayoutError> {
-    let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
+    let Some(target) = db.target_data_layout(krate) else {
+        return Err(LayoutError::TargetLayoutNotAvailable);
+    };
    let cx = LayoutCx { krate, target: &target };
    let dl = cx.current_data_layout();
    let handle_variant = |def: VariantId, var: &VariantData| {
@@ -70,9 +72,9 @@ pub fn layout_of_adt_query(
    };
    let variants = variants
        .iter()
-        .map(|x| x.iter().map(|x| &**x).collect::<Vec<_>>())
+        .map(|it| it.iter().map(|it| &**it).collect::<Vec<_>>())
        .collect::<SmallVec<[_; 1]>>();
-    let variants = variants.iter().map(|x| x.iter().collect()).collect();
+    let variants = variants.iter().map(|it| it.iter().collect()).collect();
    let result = if matches!(def, AdtId::UnionId(..)) {
        cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)?
    } else {
@@ -103,7 +105,7 @@ pub fn layout_of_adt_query(
                && variants
                    .iter()
                    .next()
-                    .and_then(|x| x.last().map(|x| x.is_unsized()))
+                    .and_then(|it| it.last().map(|it| !it.is_unsized()))
                    .unwrap_or(true),
            )
            .ok_or(LayoutError::SizeOverflow)?
@@ -116,9 +118,9 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
    let get = |name| {
        let attr = attrs.by_key(name).tt_values();
        for tree in attr {
-            if let Some(x) = tree.token_trees.first() {
-                if let Ok(x) = x.to_string().parse() {
-                    return Bound::Included(x);
+            if let Some(it) = tree.token_trees.first() {
+                if let Ok(it) = it.to_string().parse() {
+                    return Bound::Included(it);
                }
            }
        }

@@ -270,6 +270,20 @@ struct Goal(Foo<S>);
    );
}

+#[test]
+fn simd_types() {
+    check_size_and_align(
+        r#"
+        #[repr(simd)]
+        struct SimdType(i64, i64);
+        struct Goal(SimdType);
+        "#,
+        "",
+        16,
+        16,
+    );
+}
+
#[test]
fn return_position_impl_trait() {
    size_and_align_expr! {
@@ -343,6 +357,24 @@ fn return_position_impl_trait() {
    }
}

+#[test]
+fn unsized_ref() {
+    size_and_align! {
+        struct S1([u8]);
+        struct S2(S1);
+        struct S3(i32, str);
+        struct S4(u64, S3);
+        #[allow(dead_code)]
+        struct S5 {
+            field1: u8,
+            field2: i16,
+            field_last: S4,
+        }
+
+        struct Goal(&'static S1, &'static S2, &'static S3, &'static S4, &'static S5);
+    }
+}
+
#[test]
fn enums() {
    size_and_align! {
@@ -369,11 +401,11 @@ fn tuple() {
}

#[test]
-fn non_zero() {
+fn non_zero_and_non_null() {
    size_and_align! {
-        minicore: non_zero, option;
-        use core::num::NonZeroU8;
-        struct Goal(Option<NonZeroU8>);
+        minicore: non_zero, non_null, option;
+        use core::{num::NonZeroU8, ptr::NonNull};
+        struct Goal(Option<NonZeroU8>, Option<NonNull<i32>>);
    }
}

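Both payloads in the renamed test exercise the niche optimization: `NonZeroU8` forbids 0 and `NonNull` forbids the null address, so `Option` can encode `None` in the forbidden bit pattern and needs no separate discriminant. This is observable directly against std:

    use std::{num::NonZeroU8, ptr::NonNull};

    fn main() {
        // `None` reuses the niche (0 / null), so no extra byte is added.
        assert_eq!(std::mem::size_of::<Option<NonZeroU8>>(), 1);
        assert_eq!(
            std::mem::size_of::<Option<NonNull<i32>>>(),
            std::mem::size_of::<usize>(),
        );
    }
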
@@ -432,3 +464,41 @@ fn enums_with_discriminants() {
        }
    }
}
+
+#[test]
+fn core_mem_discriminant() {
+    size_and_align! {
+        minicore: discriminant;
+        struct S(i32, u64);
+        struct Goal(core::mem::Discriminant<S>);
+    }
+    size_and_align! {
+        minicore: discriminant;
+        #[repr(u32)]
+        enum S {
+            A,
+            B,
+            C,
+        }
+        struct Goal(core::mem::Discriminant<S>);
+    }
+    size_and_align! {
+        minicore: discriminant;
+        enum S {
+            A(i32),
+            B(i64),
+            C(u8),
+        }
+        struct Goal(core::mem::Discriminant<S>);
+    }
+    size_and_align! {
+        minicore: discriminant;
+        #[repr(C, u16)]
+        enum S {
+            A(i32),
+            B(i64) = 200,
+            C = 1000,
+        }
+        struct Goal(core::mem::Discriminant<S>);
+    }
+}

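`core::mem::Discriminant<T>` is an opaque wrapper over the enum's discriminant, so these tests effectively pin its layout against rustc under several `repr`s. For reference, the user-facing behavior of the API whose layout is being computed here:

    use std::mem;

    enum Shape {
        Circle(f64),
        Square(f64),
    }

    fn main() {
        // Discriminants compare by variant; the payload is ignored.
        let a = mem::discriminant(&Shape::Circle(1.0));
        let b = mem::discriminant(&Shape::Circle(2.0));
        let c = mem::discriminant(&Shape::Square(1.0));
        assert_eq!(a, b);
        assert_ne!(a, c);
    }
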
@@ -180,9 +180,16 @@ impl MemoryMap {
    /// allocator function as `f` and it will return a mapping of old addresses to new addresses.
    fn transform_addresses(
        &self,
-        mut f: impl FnMut(&[u8]) -> Result<usize, MirEvalError>,
+        mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError>,
    ) -> Result<HashMap<usize, usize>, MirEvalError> {
-        self.memory.iter().map(|x| Ok((*x.0, f(x.1)?))).collect()
+        self.memory
+            .iter()
+            .map(|x| {
+                let addr = *x.0;
+                let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) };
+                Ok((addr, f(x.1, align)?))
+            })
+            .collect()
    }

    fn get<'a>(&'a self, addr: usize, size: usize) -> Option<&'a [u8]> {

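The new `align` computation is a classic bit trick: `addr & (addr - 1)` clears the lowest set bit, so `addr - (addr & (addr - 1))` isolates that bit, i.e. the largest power of two dividing `addr` (equivalently `1 << addr.trailing_zeros()`), capped at 64 and with 0 treated as maximally aligned:

    fn main() {
        fn inferred_align(addr: usize) -> usize {
            // Same expression as in `transform_addresses`.
            if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) }
        }
        assert_eq!(inferred_align(24), 8);    // 24 = 0b11000, lowest bit = 8
        assert_eq!(inferred_align(7), 1);     // odd addresses: align 1
        assert_eq!(inferred_align(1024), 64); // capped at 64
    }
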
@@ -23,7 +23,7 @@ use hir_def::{
    generics::{
        TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
    },
-    lang_item::{lang_attr, LangItem},
+    lang_item::LangItem,
    nameres::MacroSubNs,
    path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments},
    resolver::{HasResolver, Resolver, TypeNs},
@@ -959,10 +959,10 @@ impl<'a> TyLoweringContext<'a> {
    }

    pub(crate) fn lower_where_predicate(
-        &'a self,
-        where_predicate: &'a WherePredicate,
+        &self,
+        where_predicate: &WherePredicate,
        ignore_bindings: bool,
-    ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+    ) -> impl Iterator<Item = QuantifiedWhereClause> {
        match where_predicate {
            WherePredicate::ForLifetime { target, bound, .. }
            | WherePredicate::TypeBound { target, bound } => {
@@ -1012,7 +1012,7 @@ impl<'a> TyLoweringContext<'a> {
            // (So ideally, we'd only ignore `~const Drop` here)
            // - `Destruct` impls are built-in in 1.62 (current nightly as of 08-04-2022), so until
            // the builtin impls are supported by Chalk, we ignore them here.
-            if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
+            if let Some(lang) = self.db.lang_attr(tr.hir_trait_id().into()) {
                if matches!(lang, LangItem::Drop | LangItem::Destruct) {
                    return false;
                }

@@ -534,7 +534,7 @@ impl ReceiverAdjustments {
        let mut ty = table.resolve_ty_shallow(&ty);
        let mut adjust = Vec::new();
        for _ in 0..self.autoderefs {
-            match autoderef::autoderef_step(table, ty.clone()) {
+            match autoderef::autoderef_step(table, ty.clone(), true) {
                None => {
                    never!("autoderef not possible for {:?}", ty);
                    ty = TyKind::Error.intern(Interner);
@@ -559,10 +559,10 @@ impl ReceiverAdjustments {
            adjust.push(a);
        }
        if self.unsize_array {
-            ty = 'x: {
+            ty = 'it: {
                if let TyKind::Ref(m, l, inner) = ty.kind(Interner) {
                    if let TyKind::Array(inner, _) = inner.kind(Interner) {
-                        break 'x TyKind::Ref(
+                        break 'it TyKind::Ref(
                            m.clone(),
                            l.clone(),
                            TyKind::Slice(inner.clone()).intern(Interner),
@@ -666,7 +666,7 @@ pub fn is_dyn_method(
    let self_ty = trait_ref.self_type_parameter(Interner);
    if let TyKind::Dyn(d) = self_ty.kind(Interner) {
        let is_my_trait_in_bounds =
-            d.bounds.skip_binders().as_slice(Interner).iter().any(|x| match x.skip_binders() {
+            d.bounds.skip_binders().as_slice(Interner).iter().any(|it| match it.skip_binders() {
                // rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter
                // what the generics are, we are sure that the method is come from the vtable.
                WhereClause::Implemented(tr) => tr.trait_id == trait_ref.trait_id,
@@ -682,14 +682,14 @@ pub fn is_dyn_method(
/// Looks up the impl method that actually runs for the trait method `func`.
///
/// Returns `func` if it's not a method defined in a trait or the lookup failed.
-pub fn lookup_impl_method(
+pub(crate) fn lookup_impl_method_query(
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    func: FunctionId,
    fn_subst: Substitution,
) -> (FunctionId, Substitution) {
    let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else {
-        return (func, fn_subst)
+        return (func, fn_subst);
    };
    let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
    let fn_params = fn_subst.len(Interner) - trait_params;
@@ -699,8 +699,8 @@ pub fn lookup_impl_method(
    };

    let name = &db.function_data(func).name;
-    let Some((impl_fn, impl_subst)) = lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
-        .and_then(|assoc| {
+    let Some((impl_fn, impl_subst)) =
+        lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name).and_then(|assoc| {
            if let (AssocItemId::FunctionId(id), subst) = assoc {
                Some((id, subst))
            } else {
@@ -731,7 +731,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
    let impls = db.trait_impls_in_deps(env.krate);
    let self_impls = match self_ty.kind(Interner) {
        TyKind::Adt(id, _) => {
-            id.0.module(db.upcast()).containing_block().map(|x| db.trait_impls_in_block(x))
+            id.0.module(db.upcast()).containing_block().map(|it| db.trait_impls_in_block(it))
        }
        _ => None,
    };
@@ -895,8 +895,8 @@ pub fn iterate_method_candidates_dyn(
    // (just as rustc does an autoderef and then autoref again).

    // We have to be careful about the order we're looking at candidates
-    // in here. Consider the case where we're resolving `x.clone()`
-    // where `x: &Vec<_>`. This resolves to the clone method with self
+    // in here. Consider the case where we're resolving `it.clone()`
+    // where `it: &Vec<_>`. This resolves to the clone method with self
    // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
    // the receiver type exactly matches before cases where we have to
    // do autoref. But in the autoderef steps, the `&_` self type comes
@@ -1012,8 +1012,8 @@ fn iterate_method_candidates_by_receiver(
    let snapshot = table.snapshot();
    // We're looking for methods with *receiver* type receiver_ty. These could
    // be found in any of the derefs of receiver_ty, so we have to go through
-    // that.
-    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+    // that, including raw derefs.
+    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone(), true);
    while let Some((self_ty, _)) = autoderef.next() {
        iterate_inherent_methods(
            &self_ty,
@@ -1028,7 +1028,7 @@ fn iterate_method_candidates_by_receiver(

    table.rollback_to(snapshot);

-    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone(), true);
    while let Some((self_ty, _)) = autoderef.next() {
        iterate_trait_method_candidates(
            &self_ty,
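
Every `Autoderef::new` call in this commit gains a trailing bool: `true` here in candidate search, `false` in coercion and field lookup. Going by the updated comment ("including raw derefs"), the flag appears to opt in to deref steps through raw pointers, which method-candidate search wants but implicit coercion must never insert; the parameter's actual name is not visible in this diff. In surface-Rust terms the distinction looks like this:

    fn main() {
        let x = 5i32;
        let p: *const i32 = &x;

        // Dereferencing a raw pointer is always an explicit (unsafe) step:
        assert_eq!(unsafe { *p }, 5);

        // Implicit autoderef only follows safe Deref impls, e.g. Box -> i32;
        // it never looks through `p` the way the `true` flag allows here.
        let b = Box::new(5i32);
        assert_eq!(b.pow(2), 25);
    }
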
@@ -1480,8 +1480,8 @@ fn generic_implements_goal(
        .push(self_ty.value.clone())
        .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
        .build();
-    kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|x| {
-        let vk = match x.data(Interner) {
+    kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|it| {
+        let vk = match it.data(Interner) {
            chalk_ir::GenericArgData::Ty(_) => {
                chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
            }
@@ -1504,7 +1504,7 @@ fn autoderef_method_receiver(
    ty: Ty,
) -> Vec<(Canonical<Ty>, ReceiverAdjustments)> {
    let mut deref_chain: Vec<_> = Vec::new();
-    let mut autoderef = autoderef::Autoderef::new(table, ty);
+    let mut autoderef = autoderef::Autoderef::new(table, ty, true);
    while let Some((ty, derefs)) = autoderef.next() {
        deref_chain.push((
            autoderef.table.canonicalize(ty).value,

@@ -3,9 +3,14 @@
use std::{fmt::Display, iter};

use crate::{
-    consteval::usize_const, db::HirDatabase, display::HirDisplay, infer::PointerCast,
-    lang_items::is_box, mapping::ToChalk, CallableDefId, ClosureId, Const, ConstScalar,
-    InferenceResult, Interner, MemoryMap, Substitution, Ty, TyKind,
+    consteval::usize_const,
+    db::HirDatabase,
+    display::HirDisplay,
+    infer::{normalize, PointerCast},
+    lang_items::is_box,
+    mapping::ToChalk,
+    CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
+    Substitution, TraitEnvironment, Ty, TyKind,
};
use base_db::CrateId;
use chalk_ir::Mutability;
@@ -22,7 +27,9 @@ mod pretty;
mod monomorphization;

pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
-pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError, VTableMap};
+pub use eval::{
+    interpret_mir, pad16, render_const_using_debug_impl, Evaluator, MirEvalError, VTableMap,
+};
pub use lower::{
    lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError,
};
@@ -32,6 +39,7 @@ pub use monomorphization::{
};
use smallvec::{smallvec, SmallVec};
use stdx::{impl_from, never};
+use triomphe::Arc;

use super::consteval::{intern_const_scalar, try_const_usize};

@@ -129,11 +137,19 @@ pub enum ProjectionElem<V, T> {
impl<V, T> ProjectionElem<V, T> {
    pub fn projected_ty(
        &self,
-        base: Ty,
+        mut base: Ty,
        db: &dyn HirDatabase,
        closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
        krate: CrateId,
    ) -> Ty {
+        if matches!(base.data(Interner).kind, TyKind::Alias(_) | TyKind::AssociatedType(..)) {
+            base = normalize(
+                db,
+                // FIXME: we should get this from caller
+                Arc::new(TraitEnvironment::empty(krate)),
+                base,
+            );
+        }
        match self {
            ProjectionElem::Deref => match &base.data(Interner).kind {
                TyKind::Raw(_, inner) | TyKind::Ref(_, _, inner) => inner.clone(),
|
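The new normalization step matters because a projection (field access, deref, indexing) can only be computed on a concrete type; an unresolved associated type has no fields to project. A self-contained sketch of the idea with hypothetical toy types (not rust-analyzer's API):

    trait Source {
        type Item;
    }

    struct Nums;
    impl Source for Nums {
        type Item = u32;
    }

    fn first<S: Source>(pair: (S::Item, S::Item)) -> S::Item {
        // Computing the type of `pair.0` requires normalizing `S::Item`;
        // with `S = Nums` the projected type is `u32`.
        pair.0
    }

    fn main() {
        assert_eq!(first::<Nums>((1u32, 2u32)), 1);
    }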
@@ -321,8 +337,8 @@ impl SwitchTargets {

 #[derive(Debug, PartialEq, Eq, Clone)]
 pub struct Terminator {
-    span: MirSpan,
-    kind: TerminatorKind,
+    pub span: MirSpan,
+    pub kind: TerminatorKind,
 }

 #[derive(Debug, PartialEq, Eq, Clone)]
@@ -52,7 +52,7 @@ fn all_mir_bodies(
             let closures = body.closures.clone();
             Box::new(
                 iter::once(Ok(body))
-                    .chain(closures.into_iter().flat_map(|x| for_closure(db, x))),
+                    .chain(closures.into_iter().flat_map(|it| for_closure(db, it))),
             )
         }
         Err(e) => Box::new(iter::once(Err(e))),
@@ -62,7 +62,7 @@ fn all_mir_bodies(
         Ok(body) => {
             let closures = body.closures.clone();
             Box::new(
-                iter::once(Ok(body)).chain(closures.into_iter().flat_map(|x| for_closure(db, x))),
+                iter::once(Ok(body)).chain(closures.into_iter().flat_map(|it| for_closure(db, it))),
             )
         }
         Err(e) => Box::new(iter::once(Err(e))),
@@ -171,7 +171,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
             }
             TerminatorKind::Call { func, args, .. } => {
                 for_operand(func, terminator.span);
-                args.iter().for_each(|x| for_operand(x, terminator.span));
+                args.iter().for_each(|it| for_operand(it, terminator.span));
             }
             TerminatorKind::Assert { cond, .. } => {
                 for_operand(cond, terminator.span);
@@ -245,7 +245,7 @@ fn ever_initialized_map(
     body: &MirBody,
 ) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
     let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
-        body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect();
+        body.basic_blocks.iter().map(|it| (it.0, ArenaMap::default())).collect();
     fn dfs(
         db: &dyn HirDatabase,
         body: &MirBody,
@@ -271,7 +271,10 @@ fn ever_initialized_map(
             }
         }
         let Some(terminator) = &block.terminator else {
-            never!("Terminator should be none only in construction.\nThe body:\n{}", body.pretty_print(db));
+            never!(
+                "Terminator should be none only in construction.\nThe body:\n{}",
+                body.pretty_print(db)
+            );
             return;
         };
         let targets = match &terminator.kind {
@@ -311,7 +314,7 @@ fn ever_initialized_map(
         result[body.start_block].insert(l, true);
         dfs(db, body, body.start_block, l, &mut result);
     }
-    for l in body.locals.iter().map(|x| x.0) {
+    for l in body.locals.iter().map(|it| it.0) {
         if !result[body.start_block].contains_idx(l) {
             result[body.start_block].insert(l, false);
             dfs(db, body, body.start_block, l, &mut result);
@@ -325,10 +328,10 @@ fn mutability_of_locals(
     body: &MirBody,
 ) -> ArenaMap<LocalId, MutabilityReason> {
     let mut result: ArenaMap<LocalId, MutabilityReason> =
-        body.locals.iter().map(|x| (x.0, MutabilityReason::Not)).collect();
+        body.locals.iter().map(|it| (it.0, MutabilityReason::Not)).collect();
     let mut push_mut_span = |local, span| match &mut result[local] {
         MutabilityReason::Mut { spans } => spans.push(span),
-        x @ MutabilityReason::Not => *x = MutabilityReason::Mut { spans: vec![span] },
+        it @ MutabilityReason::Not => *it = MutabilityReason::Mut { spans: vec![span] },
     };
     let ever_init_maps = ever_initialized_map(db, body);
     for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
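The `it @ MutabilityReason::Not` arm above uses an `@` binding to capture the matched place so its variant can be replaced in place. A self-contained sketch with toy types (assumed names, not the analyzer's):

    #[derive(Debug)]
    enum Reason {
        Not,
        Mut { spans: Vec<u32> },
    }

    fn push_span(r: &mut Reason, span: u32) {
        match r {
            Reason::Mut { spans } => spans.push(span),
            // `it` binds as `&mut Reason`, so the variant can be overwritten.
            it @ Reason::Not => *it = Reason::Mut { spans: vec![span] },
        }
    }

    fn main() {
        let mut r = Reason::Not;
        push_span(&mut r, 7);
        push_span(&mut r, 9);
        println!("{r:?}"); // Mut { spans: [7, 9] }
    }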
File diff suppressed because it is too large
@@ -3,20 +3,26 @@
 use std::cmp;

+use chalk_ir::TyKind;
+use hir_def::resolver::HasResolver;
+use hir_expand::mod_path::ModPath;
+
 use super::*;

+mod simd;
+
 macro_rules! from_bytes {
     ($ty:tt, $value:expr) => {
         ($ty::from_le_bytes(match ($value).try_into() {
-            Ok(x) => x,
+            Ok(it) => it,
             Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
         }))
     };
 }

 macro_rules! not_supported {
-    ($x: expr) => {
-        return Err(MirEvalError::NotSupported(format!($x)))
+    ($it: expr) => {
+        return Err(MirEvalError::NotSupported(format!($it)))
     };
 }
@@ -26,7 +32,7 @@ impl Evaluator<'_> {
         def: FunctionId,
         args: &[IntervalAndTy],
         generic_args: &Substitution,
-        locals: &Locals<'_>,
+        locals: &Locals,
         destination: Interval,
         span: MirSpan,
     ) -> Result<bool> {
@@ -53,6 +59,28 @@ impl Evaluator<'_> {
             )?;
             return Ok(true);
         }
+        let is_platform_intrinsic = match &function_data.abi {
+            Some(abi) => *abi == Interned::new_str("platform-intrinsic"),
+            None => match def.lookup(self.db.upcast()).container {
+                hir_def::ItemContainerId::ExternBlockId(block) => {
+                    let id = block.lookup(self.db.upcast()).id;
+                    id.item_tree(self.db.upcast())[id.value].abi.as_deref()
+                        == Some("platform-intrinsic")
+                }
+                _ => false,
+            },
+        };
+        if is_platform_intrinsic {
+            self.exec_platform_intrinsic(
+                function_data.name.as_text().unwrap_or_default().as_str(),
+                args,
+                generic_args,
+                destination,
+                &locals,
+                span,
+            )?;
+            return Ok(true);
+        }
         let is_extern_c = match def.lookup(self.db.upcast()).container {
             hir_def::ItemContainerId::ExternBlockId(block) => {
                 let id = block.lookup(self.db.upcast()).id;
@@ -74,25 +102,25 @@ impl Evaluator<'_> {
         let alloc_fn = function_data
             .attrs
             .iter()
-            .filter_map(|x| x.path().as_ident())
-            .filter_map(|x| x.as_str())
-            .find(|x| {
+            .filter_map(|it| it.path().as_ident())
+            .filter_map(|it| it.as_str())
+            .find(|it| {
                 [
                     "rustc_allocator",
                     "rustc_deallocator",
                     "rustc_reallocator",
                     "rustc_allocator_zeroed",
                 ]
-                .contains(x)
+                .contains(it)
             });
         if let Some(alloc_fn) = alloc_fn {
             self.exec_alloc_fn(alloc_fn, args, destination)?;
             return Ok(true);
         }
-        if let Some(x) = self.detect_lang_function(def) {
+        if let Some(it) = self.detect_lang_function(def) {
             let arg_bytes =
-                args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
-            let result = self.exec_lang_item(x, generic_args, &arg_bytes, locals, span)?;
+                args.iter().map(|it| Ok(it.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
+            let result = self.exec_lang_item(it, generic_args, &arg_bytes, locals, span)?;
             destination.write_from_bytes(self, &result)?;
             return Ok(true);
         }
@@ -112,7 +140,7 @@ impl Evaluator<'_> {
                 };
                 let size = from_bytes!(usize, size.get(self)?);
                 let align = from_bytes!(usize, align.get(self)?);
-                let result = self.heap_allocate(size, align);
+                let result = self.heap_allocate(size, align)?;
                 destination.write_from_bytes(self, &result.to_bytes())?;
             }
             "rustc_deallocator" => { /* no-op for now */ }
@@ -120,15 +148,19 @@ impl Evaluator<'_> {
                 let [ptr, old_size, align, new_size] = args else {
                     return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
                 };
-                let ptr = Address::from_bytes(ptr.get(self)?)?;
                 let old_size = from_bytes!(usize, old_size.get(self)?);
                 let new_size = from_bytes!(usize, new_size.get(self)?);
+                if old_size >= new_size {
+                    destination.write_from_interval(self, ptr.interval)?;
+                } else {
+                    let ptr = Address::from_bytes(ptr.get(self)?)?;
                     let align = from_bytes!(usize, align.get(self)?);
-                let result = self.heap_allocate(new_size, align);
+                    let result = self.heap_allocate(new_size, align)?;
                     Interval { addr: result, size: old_size }
                         .write_from_interval(self, Interval { addr: ptr, size: old_size })?;
                     destination.write_from_bytes(self, &result.to_bytes())?;
                 }
+            }
             _ => not_supported!("unknown alloc function"),
         }
         Ok(())
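The reworked `rustc_reallocator` shim above avoids allocating when the block shrinks: if `old_size >= new_size` the old pointer is returned unchanged, otherwise a new block is allocated and the old bytes are copied over. A minimal native sketch of that strategy (a `Vec`-backed toy heap, not the evaluator's memory model):

    fn realloc_sim(heap: &mut Vec<Vec<u8>>, ptr: usize, old_size: usize, new_size: usize) -> usize {
        if old_size >= new_size {
            ptr // shrinking: reuse the existing allocation
        } else {
            // growing: allocate fresh and copy the old contents over
            let mut block = vec![0u8; new_size];
            block[..old_size].copy_from_slice(&heap[ptr][..old_size]);
            heap.push(block);
            heap.len() - 1
        }
    }

    fn main() {
        let mut heap = vec![vec![1u8, 2, 3, 4]];
        let p = realloc_sim(&mut heap, 0, 4, 8);
        assert_eq!(&heap[p][..4], &[1, 2, 3, 4]);
    }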
@@ -136,7 +168,7 @@ impl Evaluator<'_> {

     fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
         use LangItem::*;
-        let candidate = lang_attr(self.db.upcast(), def)?;
+        let candidate = self.db.lang_attr(def.into())?;
         // We want to execute these functions with special logic
         if [PanicFmt, BeginPanic, SliceLen, DropInPlace].contains(&candidate) {
             return Some(candidate);
@@ -146,56 +178,35 @@ impl Evaluator<'_> {

     fn exec_lang_item(
         &mut self,
-        x: LangItem,
+        it: LangItem,
         generic_args: &Substitution,
         args: &[Vec<u8>],
-        locals: &Locals<'_>,
+        locals: &Locals,
         span: MirSpan,
     ) -> Result<Vec<u8>> {
         use LangItem::*;
         let mut args = args.iter();
-        match x {
+        match it {
             BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_string())),
             PanicFmt => {
                 let message = (|| {
-                    let arguments_struct =
-                        self.db.lang_item(self.crate_id, LangItem::FormatArguments)?.as_struct()?;
-                    let arguments_layout = self
-                        .layout_adt(arguments_struct.into(), Substitution::empty(Interner))
-                        .ok()?;
-                    let arguments_field_pieces =
-                        self.db.struct_data(arguments_struct).variant_data.field(&name![pieces])?;
-                    let pieces_offset = arguments_layout
-                        .fields
-                        .offset(u32::from(arguments_field_pieces.into_raw()) as usize)
-                        .bytes_usize();
-                    let ptr_size = self.ptr_size();
-                    let arg = args.next()?;
-                    let pieces_array_addr =
-                        Address::from_bytes(&arg[pieces_offset..pieces_offset + ptr_size]).ok()?;
-                    let pieces_array_len = usize::from_le_bytes(
-                        (&arg[pieces_offset + ptr_size..pieces_offset + 2 * ptr_size])
-                            .try_into()
-                            .ok()?,
-                    );
-                    let mut message = "".to_string();
-                    for i in 0..pieces_array_len {
-                        let piece_ptr_addr = pieces_array_addr.offset(2 * i * ptr_size);
-                        let piece_addr =
-                            Address::from_bytes(self.read_memory(piece_ptr_addr, ptr_size).ok()?)
-                                .ok()?;
-                        let piece_len = usize::from_le_bytes(
-                            self.read_memory(piece_ptr_addr.offset(ptr_size), ptr_size)
-                                .ok()?
-                                .try_into()
-                                .ok()?,
-                        );
-                        let piece_data = self.read_memory(piece_addr, piece_len).ok()?;
-                        message += &std::string::String::from_utf8_lossy(piece_data);
-                    }
-                    Some(message)
+                    let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db.upcast());
+                    let Some(format_fn) = resolver.resolve_path_in_value_ns_fully(
+                        self.db.upcast(),
+                        &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments(
+                            hir_expand::mod_path::PathKind::Abs,
+                            [name![std], name![fmt], name![format]].into_iter(),
+                        )),
+                    ) else {
+                        not_supported!("std::fmt::format not found");
+                    };
+                    let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else { not_supported!("std::fmt::format is not a function") };
+                    let message_string = self.interpret_mir(self.db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?, args.map(|x| IntervalOrOwned::Owned(x.clone())))?;
+                    let addr = Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?;
+                    let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]);
+                    Ok(std::string::String::from_utf8_lossy(self.read_memory(addr, size)?).into_owned())
                 })()
-                .unwrap_or_else(|| "<format-args-evaluation-failed>".to_string());
+                .unwrap_or_else(|e| format!("Failed to render panic format args: {e:?}"));
                 Err(MirEvalError::Panic(message))
             }
             SliceLen => {
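The rewritten `PanicFmt` shim no longer walks the `fmt::Arguments` pieces by hand; it resolves `std::fmt::format`, runs it through the interpreter, and reads the resulting `String` (pointer and length) back out of memory. Conceptually it delegates to the same call shown here natively:

    fn main() {
        // std::fmt::format renders a fmt::Arguments into a String;
        // the shim performs this call inside the MIR interpreter.
        let message = std::fmt::format(format_args!("index out of bounds: {} >= {}", 5, 3));
        assert_eq!(message, "index out of bounds: 5 >= 3");
    }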
@@ -207,7 +218,7 @@ impl Evaluator<'_> {
             }
             DropInPlace => {
                 let ty =
-                    generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)).ok_or(
+                    generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)).ok_or(
                         MirEvalError::TypeError(
                             "generic argument of drop_in_place is not provided",
                         ),
@@ -224,7 +235,35 @@ impl Evaluator<'_> {
                 )?;
                 Ok(vec![])
             }
-            x => not_supported!("Executing lang item {x:?}"),
+            it => not_supported!("Executing lang item {it:?}"),
+        }
+    }
+
+    fn exec_syscall(
+        &mut self,
+        id: i64,
+        args: &[IntervalAndTy],
+        destination: Interval,
+        _locals: &Locals,
+        _span: MirSpan,
+    ) -> Result<()> {
+        match id {
+            318 => {
+                // SYS_getrandom
+                let [buf, len, _flags] = args else {
+                    return Err(MirEvalError::TypeError("SYS_getrandom args are not provided"));
+                };
+                let addr = Address::from_bytes(buf.get(self)?)?;
+                let size = from_bytes!(usize, len.get(self)?);
+                for i in 0..size {
+                    let rand_byte = self.random_state.rand_u64() as u8;
+                    self.write_memory(addr.offset(i), &[rand_byte])?;
+                }
+                destination.write_from_interval(self, len.interval)
+            }
+            _ => {
+                not_supported!("Unknown syscall id {id:?}")
+            }
         }
     }
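The new `exec_syscall` handles `SYS_getrandom` (318 on x86-64 Linux) by filling the buffer from the evaluator's RNG and returning the byte count. A native sketch of the emulated contract, with a fixed xorshift source standing in for `random_state` (assumed toy code):

    fn getrandom_sim(buf: &mut [u8], mut seed: u64) -> i64 {
        for byte in buf.iter_mut() {
            // xorshift step standing in for the evaluator's RNG
            seed ^= seed << 13;
            seed ^= seed >> 7;
            seed ^= seed << 17;
            *byte = seed as u8;
        }
        buf.len() as i64 // getrandom returns the number of bytes written
    }

    fn main() {
        let mut buf = [0u8; 4];
        assert_eq!(getrandom_sim(&mut buf, 0x9E3779B97F4A7C15), 4);
    }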
@@ -234,8 +273,8 @@ impl Evaluator<'_> {
         args: &[IntervalAndTy],
         _generic_args: &Substitution,
         destination: Interval,
-        locals: &Locals<'_>,
-        _span: MirSpan,
+        locals: &Locals,
+        span: MirSpan,
     ) -> Result<()> {
         match as_str {
             "memcmp" => {
@@ -299,7 +338,9 @@ impl Evaluator<'_> {
             }
             "pthread_getspecific" => {
                 let Some(arg0) = args.get(0) else {
-                    return Err(MirEvalError::TypeError("pthread_getspecific arg0 is not provided"));
+                    return Err(MirEvalError::TypeError(
+                        "pthread_getspecific arg0 is not provided",
+                    ));
                 };
                 let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
                 let value = self.thread_local_storage.get_key(key)?;
@@ -308,11 +349,15 @@ impl Evaluator<'_> {
             }
             "pthread_setspecific" => {
                 let Some(arg0) = args.get(0) else {
-                    return Err(MirEvalError::TypeError("pthread_setspecific arg0 is not provided"));
+                    return Err(MirEvalError::TypeError(
+                        "pthread_setspecific arg0 is not provided",
+                    ));
                 };
                 let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
                 let Some(arg1) = args.get(1) else {
-                    return Err(MirEvalError::TypeError("pthread_setspecific arg1 is not provided"));
+                    return Err(MirEvalError::TypeError(
+                        "pthread_setspecific arg1 is not provided",
+                    ));
                 };
                 let value = from_bytes!(u128, pad16(arg1.get(self)?, false));
                 self.thread_local_storage.set_key(key, value)?;
@@ -326,17 +371,52 @@ impl Evaluator<'_> {
                 destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
                 Ok(())
             }
+            "syscall" => {
+                let Some((id, rest)) = args.split_first() else {
+                    return Err(MirEvalError::TypeError(
+                        "syscall arg1 is not provided",
+                    ));
+                };
+                let id = from_bytes!(i64, id.get(self)?);
+                self.exec_syscall(id, rest, destination, locals, span)
+            }
+            "sched_getaffinity" => {
+                let [_pid, _set_size, set] = args else {
+                    return Err(MirEvalError::TypeError("libc::write args are not provided"));
+                };
+                let set = Address::from_bytes(set.get(self)?)?;
+                // Only enable core 0 (we are single threaded anyway), which is bitset 0x0000001
+                self.write_memory(set, &[1])?;
+                // return 0 as success
+                self.write_memory_using_ref(destination.addr, destination.size)?.fill(0);
+                Ok(())
+            }
             _ => not_supported!("unknown external function {as_str}"),
         }
     }

+    fn exec_platform_intrinsic(
+        &mut self,
+        name: &str,
+        args: &[IntervalAndTy],
+        generic_args: &Substitution,
+        destination: Interval,
+        locals: &Locals,
+        span: MirSpan,
+    ) -> Result<()> {
+        if let Some(name) = name.strip_prefix("simd_") {
+            return self.exec_simd_intrinsic(name, args, generic_args, destination, locals, span);
+        }
+        not_supported!("unknown platform intrinsic {name}");
+    }
+
     fn exec_intrinsic(
         &mut self,
         name: &str,
         args: &[IntervalAndTy],
         generic_args: &Substitution,
         destination: Interval,
-        locals: &Locals<'_>,
+        locals: &Locals,
         span: MirSpan,
     ) -> Result<()> {
         if let Some(name) = name.strip_prefix("atomic_") {
@@ -347,7 +427,9 @@ impl Evaluator<'_> {
                 "sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
                 | "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
                     let [arg] = args else {
-                        return Err(MirEvalError::TypeError("f64 intrinsic signature doesn't match fn (f64) -> f64"));
+                        return Err(MirEvalError::TypeError(
+                            "f64 intrinsic signature doesn't match fn (f64) -> f64",
+                        ));
                     };
                     let arg = from_bytes!(f64, arg.get(self)?);
                     match name {
@@ -373,7 +455,9 @@ impl Evaluator<'_> {
                 }
                 "pow" | "minnum" | "maxnum" | "copysign" => {
                     let [arg1, arg2] = args else {
-                        return Err(MirEvalError::TypeError("f64 intrinsic signature doesn't match fn (f64, f64) -> f64"));
+                        return Err(MirEvalError::TypeError(
+                            "f64 intrinsic signature doesn't match fn (f64, f64) -> f64",
+                        ));
                     };
                     let arg1 = from_bytes!(f64, arg1.get(self)?);
                     let arg2 = from_bytes!(f64, arg2.get(self)?);
@@ -387,7 +471,9 @@ impl Evaluator<'_> {
                 }
                 "powi" => {
                     let [arg1, arg2] = args else {
-                        return Err(MirEvalError::TypeError("powif64 signature doesn't match fn (f64, i32) -> f64"));
+                        return Err(MirEvalError::TypeError(
+                            "powif64 signature doesn't match fn (f64, i32) -> f64",
+                        ));
                     };
                     let arg1 = from_bytes!(f64, arg1.get(self)?);
                     let arg2 = from_bytes!(i32, arg2.get(self)?);
@@ -395,7 +481,9 @@ impl Evaluator<'_> {
                 }
                 "fma" => {
                     let [arg1, arg2, arg3] = args else {
-                        return Err(MirEvalError::TypeError("fmaf64 signature doesn't match fn (f64, f64, f64) -> f64"));
+                        return Err(MirEvalError::TypeError(
+                            "fmaf64 signature doesn't match fn (f64, f64, f64) -> f64",
+                        ));
                     };
                     let arg1 = from_bytes!(f64, arg1.get(self)?);
                     let arg2 = from_bytes!(f64, arg2.get(self)?);
@@ -411,7 +499,9 @@ impl Evaluator<'_> {
                 "sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
                 | "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
                     let [arg] = args else {
-                        return Err(MirEvalError::TypeError("f32 intrinsic signature doesn't match fn (f32) -> f32"));
+                        return Err(MirEvalError::TypeError(
+                            "f32 intrinsic signature doesn't match fn (f32) -> f32",
+                        ));
                     };
                     let arg = from_bytes!(f32, arg.get(self)?);
                     match name {
@@ -437,7 +527,9 @@ impl Evaluator<'_> {
                 }
                 "pow" | "minnum" | "maxnum" | "copysign" => {
                     let [arg1, arg2] = args else {
-                        return Err(MirEvalError::TypeError("f32 intrinsic signature doesn't match fn (f32, f32) -> f32"));
+                        return Err(MirEvalError::TypeError(
+                            "f32 intrinsic signature doesn't match fn (f32, f32) -> f32",
+                        ));
                     };
                     let arg1 = from_bytes!(f32, arg1.get(self)?);
                     let arg2 = from_bytes!(f32, arg2.get(self)?);
@@ -451,7 +543,9 @@ impl Evaluator<'_> {
                 }
                 "powi" => {
                     let [arg1, arg2] = args else {
-                        return Err(MirEvalError::TypeError("powif32 signature doesn't match fn (f32, i32) -> f32"));
+                        return Err(MirEvalError::TypeError(
+                            "powif32 signature doesn't match fn (f32, i32) -> f32",
+                        ));
                     };
                     let arg1 = from_bytes!(f32, arg1.get(self)?);
                     let arg2 = from_bytes!(i32, arg2.get(self)?);
@@ -459,7 +553,9 @@ impl Evaluator<'_> {
                 }
                 "fma" => {
                     let [arg1, arg2, arg3] = args else {
-                        return Err(MirEvalError::TypeError("fmaf32 signature doesn't match fn (f32, f32, f32) -> f32"));
+                        return Err(MirEvalError::TypeError(
+                            "fmaf32 signature doesn't match fn (f32, f32, f32) -> f32",
+                        ));
                     };
                     let arg1 = from_bytes!(f32, arg1.get(self)?);
                     let arg2 = from_bytes!(f32, arg2.get(self)?);
@@ -472,21 +568,74 @@ impl Evaluator<'_> {
         }
         match name {
             "size_of" => {
-                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+                else {
                     return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
                 };
                 let size = self.size_of_sized(ty, locals, "size_of arg")?;
                 destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
             }
             "min_align_of" | "pref_align_of" => {
-                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
                     return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
                 };
                 let align = self.layout(ty)?.align.abi.bytes();
                 destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
             }
+            "size_of_val" => {
+                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+                else {
+                    return Err(MirEvalError::TypeError("size_of_val generic arg is not provided"));
+                };
+                let [arg] = args else {
+                    return Err(MirEvalError::TypeError("size_of_val args are not provided"));
+                };
+                if let Some((size, _)) = self.size_align_of(ty, locals)? {
+                    destination.write_from_bytes(self, &size.to_le_bytes())
+                } else {
+                    let metadata = arg.interval.slice(self.ptr_size()..self.ptr_size() * 2);
+                    let (size, _) = self.size_align_of_unsized(ty, metadata, locals)?;
+                    destination.write_from_bytes(self, &size.to_le_bytes())
+                }
+            }
+            "min_align_of_val" => {
+                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
+                    return Err(MirEvalError::TypeError("min_align_of_val generic arg is not provided"));
+                };
+                let [arg] = args else {
+                    return Err(MirEvalError::TypeError("min_align_of_val args are not provided"));
+                };
+                if let Some((_, align)) = self.size_align_of(ty, locals)? {
+                    destination.write_from_bytes(self, &align.to_le_bytes())
+                } else {
+                    let metadata = arg.interval.slice(self.ptr_size()..self.ptr_size() * 2);
+                    let (_, align) = self.size_align_of_unsized(ty, metadata, locals)?;
+                    destination.write_from_bytes(self, &align.to_le_bytes())
+                }
+            }
+            "type_name" => {
+                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+                else {
+                    return Err(MirEvalError::TypeError("type_name generic arg is not provided"));
+                };
+                let Ok(ty_name) = ty.display_source_code(
+                    self.db,
+                    locals.body.owner.module(self.db.upcast()),
+                    true,
+                ) else {
+                    not_supported!("fail in generating type_name using source code display");
+                };
+                let len = ty_name.len();
+                let addr = self.heap_allocate(len, 1)?;
+                self.write_memory(addr, ty_name.as_bytes())?;
+                destination.slice(0..self.ptr_size()).write_from_bytes(self, &addr.to_bytes())?;
+                destination
+                    .slice(self.ptr_size()..2 * self.ptr_size())
+                    .write_from_bytes(self, &len.to_le_bytes())
+            }
             "needs_drop" => {
-                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+                else {
                     return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
                 };
                 let result = !ty.clone().is_copy(self.db, locals.body.owner);
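The new `size_of_val` / `min_align_of_val` shims mirror std's behavior: for sized types the statically known size, for unsized tails a size computed from the fat-pointer metadata. The native semantics, for reference:

    use std::mem::size_of_val;

    fn main() {
        assert_eq!(size_of_val(&1u64), 8); // sized: static size
        let s: &str = "hello";
        assert_eq!(size_of_val(s), 5);     // str: byte length from metadata
        let xs: &[u32] = &[1, 2, 3];
        assert_eq!(size_of_val(xs), 12);   // slice: len * element size
    }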
@@ -501,13 +650,17 @@ impl Evaluator<'_> {
                 let ans = lhs.get(self)? == rhs.get(self)?;
                 destination.write_from_bytes(self, &[u8::from(ans)])
             }
-            "saturating_add" => {
+            "saturating_add" | "saturating_sub" => {
                 let [lhs, rhs] = args else {
                     return Err(MirEvalError::TypeError("saturating_add args are not provided"));
                 };
                 let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
                 let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
-                let ans = lhs.saturating_add(rhs);
+                let ans = match name {
+                    "saturating_add" => lhs.saturating_add(rhs),
+                    "saturating_sub" => lhs.saturating_sub(rhs),
+                    _ => unreachable!(),
+                };
                 let bits = destination.size * 8;
                 // FIXME: signed
                 let is_signed = false;
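For reference, the saturating semantics the merged arm implements, clamping at the numeric bounds instead of wrapping:

    fn main() {
        assert_eq!(u8::MAX.saturating_add(1), 255); // clamps at the top
        assert_eq!(0u8.saturating_sub(1), 0);       // clamps at the bottom
    }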
@@ -544,6 +697,26 @@ impl Evaluator<'_> {
                 let ans = lhs.wrapping_mul(rhs);
                 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
             }
+            "wrapping_shl" | "unchecked_shl" => {
+                // FIXME: signed
+                let [lhs, rhs] = args else {
+                    return Err(MirEvalError::TypeError("unchecked_shl args are not provided"));
+                };
+                let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
+                let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
+                let ans = lhs.wrapping_shl(rhs as u32);
+                destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
+            }
+            "wrapping_shr" | "unchecked_shr" => {
+                // FIXME: signed
+                let [lhs, rhs] = args else {
+                    return Err(MirEvalError::TypeError("unchecked_shr args are not provided"));
+                };
+                let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
+                let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
+                let ans = lhs.wrapping_shr(rhs as u32);
+                destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
+            }
             "unchecked_rem" => {
                 // FIXME: signed
                 let [lhs, rhs] = args else {
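For reference, `wrapping_shl` / `wrapping_shr` mask the shift amount by the type's bit width rather than overflowing (note the shim computes on a zero-padded `u128`, so the masking width there is 128):

    fn main() {
        // For u8 the shift amount is taken mod 8: 10 % 8 = 2.
        assert_eq!(1u8.wrapping_shl(10), 4);
        assert_eq!(0b1000_0000u8.wrapping_shr(9), 0b0100_0000);
    }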
@@ -588,7 +761,7 @@ impl Evaluator<'_> {
                     _ => unreachable!(),
                 };
                 let is_overflow = u128overflow
-                    || ans.to_le_bytes()[op_size..].iter().any(|&x| x != 0 && x != 255);
+                    || ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
                 let is_overflow = vec![u8::from(is_overflow)];
                 let layout = self.layout(&result_ty)?;
                 let result = self.make_by_layout(
@@ -603,10 +776,15 @@ impl Evaluator<'_> {
             }
             "copy" | "copy_nonoverlapping" => {
                 let [src, dst, offset] = args else {
-                    return Err(MirEvalError::TypeError("copy_nonoverlapping args are not provided"));
+                    return Err(MirEvalError::TypeError(
+                        "copy_nonoverlapping args are not provided",
+                    ));
                 };
-                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
-                    return Err(MirEvalError::TypeError("copy_nonoverlapping generic arg is not provided"));
+                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+                else {
+                    return Err(MirEvalError::TypeError(
+                        "copy_nonoverlapping generic arg is not provided",
+                    ));
                 };
                 let src = Address::from_bytes(src.get(self)?)?;
                 let dst = Address::from_bytes(dst.get(self)?)?;
@@ -621,7 +799,8 @@ impl Evaluator<'_> {
                 let [ptr, offset] = args else {
                     return Err(MirEvalError::TypeError("offset args are not provided"));
                 };
-                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+                else {
                     return Err(MirEvalError::TypeError("offset generic arg is not provided"));
                 };
                 let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false));
@@ -652,20 +831,106 @@ impl Evaluator<'_> {
             }
             "ctpop" => {
                 let [arg] = args else {
-                    return Err(MirEvalError::TypeError("likely arg is not provided"));
+                    return Err(MirEvalError::TypeError("ctpop arg is not provided"));
                 };
                 let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones();
                 destination
                     .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
             }
+            "ctlz" | "ctlz_nonzero" => {
+                let [arg] = args else {
+                    return Err(MirEvalError::TypeError("cttz arg is not provided"));
+                };
+                let result =
+                    u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;
+                let result = result - (128 - arg.interval.size * 8);
+                destination
+                    .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
+            }
             "cttz" | "cttz_nonzero" => {
                 let [arg] = args else {
-                    return Err(MirEvalError::TypeError("likely arg is not provided"));
+                    return Err(MirEvalError::TypeError("cttz arg is not provided"));
                 };
                 let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros();
                 destination
                     .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
             }
+            "rotate_left" => {
+                let [lhs, rhs] = args else {
+                    return Err(MirEvalError::TypeError("rotate_left args are not provided"));
+                };
+                let lhs = &lhs.get(self)?[0..destination.size];
+                let rhs = rhs.get(self)?[0] as u32;
+                match destination.size {
+                    1 => {
+                        let r = from_bytes!(u8, lhs).rotate_left(rhs);
+                        destination.write_from_bytes(self, &r.to_le_bytes())
+                    }
+                    2 => {
+                        let r = from_bytes!(u16, lhs).rotate_left(rhs);
+                        destination.write_from_bytes(self, &r.to_le_bytes())
+                    }
+                    4 => {
+                        let r = from_bytes!(u32, lhs).rotate_left(rhs);
+                        destination.write_from_bytes(self, &r.to_le_bytes())
+                    }
+                    8 => {
+                        let r = from_bytes!(u64, lhs).rotate_left(rhs);
+                        destination.write_from_bytes(self, &r.to_le_bytes())
+                    }
+                    16 => {
+                        let r = from_bytes!(u128, lhs).rotate_left(rhs);
+                        destination.write_from_bytes(self, &r.to_le_bytes())
+                    }
+                    s => not_supported!("destination with size {s} for rotate_left"),
+                }
+            }
+            "rotate_right" => {
+                let [lhs, rhs] = args else {
+                    return Err(MirEvalError::TypeError("rotate_right args are not provided"));
+                };
+                let lhs = &lhs.get(self)?[0..destination.size];
+                let rhs = rhs.get(self)?[0] as u32;
+                match destination.size {
+                    1 => {
+                        let r = from_bytes!(u8, lhs).rotate_right(rhs);
+                        destination.write_from_bytes(self, &r.to_le_bytes())
+                    }
+                    2 => {
+                        let r = from_bytes!(u16, lhs).rotate_right(rhs);
+                        destination.write_from_bytes(self, &r.to_le_bytes())
+                    }
+                    4 => {
+                        let r = from_bytes!(u32, lhs).rotate_right(rhs);
+                        destination.write_from_bytes(self, &r.to_le_bytes())
+                    }
+                    8 => {
+                        let r = from_bytes!(u64, lhs).rotate_right(rhs);
+                        destination.write_from_bytes(self, &r.to_le_bytes())
+                    }
+                    16 => {
+                        let r = from_bytes!(u128, lhs).rotate_right(rhs);
+                        destination.write_from_bytes(self, &r.to_le_bytes())
+                    }
+                    s => not_supported!("destination with size {s} for rotate_right"),
+                }
+            }
+            "discriminant_value" => {
+                let [arg] = args else {
+                    return Err(MirEvalError::TypeError("discriminant_value arg is not provided"));
+                };
+                let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+                else {
+                    return Err(MirEvalError::TypeError(
+                        "discriminant_value generic arg is not provided",
+                    ));
+                };
+                let addr = Address::from_bytes(arg.get(self)?)?;
+                let size = self.size_of_sized(ty, locals, "discriminant_value ptr type")?;
+                let interval = Interval { addr, size };
+                let r = self.compute_discriminant(ty.clone(), interval.get(self)?)?;
+                destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])
+            }
             "const_eval_select" => {
                 let [tuple, const_fn, _] = args else {
                     return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
|
||||||
let addr = tuple.interval.addr.offset(offset);
|
let addr = tuple.interval.addr.offset(offset);
|
||||||
args.push(IntervalAndTy::new(addr, field, self, locals)?);
|
args.push(IntervalAndTy::new(addr, field, self, locals)?);
|
||||||
}
|
}
|
||||||
self.exec_fn_trait(&args, destination, locals, span)
|
if let Some(target) = self.db.lang_item(self.crate_id, LangItem::FnOnce) {
|
||||||
|
if let Some(def) = target
|
||||||
|
.as_trait()
|
||||||
|
.and_then(|it| self.db.trait_data(it).method_by_name(&name![call_once]))
|
||||||
|
{
|
||||||
|
self.exec_fn_trait(
|
||||||
|
def,
|
||||||
|
&args,
|
||||||
|
// FIXME: wrong for manual impls of `FnOnce`
|
||||||
|
Substitution::empty(Interner),
|
||||||
|
locals,
|
||||||
|
destination,
|
||||||
|
None,
|
||||||
|
span,
|
||||||
|
)?;
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
not_supported!("FnOnce was not available for executing const_eval_select");
|
||||||
|
}
|
||||||
|
"read_via_copy" | "volatile_load" => {
|
||||||
|
let [arg] = args else {
|
||||||
|
return Err(MirEvalError::TypeError("read_via_copy args are not provided"));
|
||||||
|
};
|
||||||
|
let addr = Address::from_bytes(arg.interval.get(self)?)?;
|
||||||
|
destination.write_from_interval(self, Interval { addr, size: destination.size })
|
||||||
|
}
|
||||||
|
"write_bytes" => {
|
||||||
|
let [dst, val, count] = args else {
|
||||||
|
return Err(MirEvalError::TypeError("write_bytes args are not provided"));
|
||||||
|
};
|
||||||
|
let count = from_bytes!(usize, count.get(self)?);
|
||||||
|
let val = from_bytes!(u8, val.get(self)?);
|
||||||
|
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
|
||||||
|
else {
|
||||||
|
return Err(MirEvalError::TypeError(
|
||||||
|
"write_bytes generic arg is not provided",
|
||||||
|
));
|
||||||
|
};
|
||||||
|
let dst = Address::from_bytes(dst.get(self)?)?;
|
||||||
|
let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
|
||||||
|
let size = count * size;
|
||||||
|
self.write_memory_using_ref(dst, size)?.fill(val);
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
_ => not_supported!("unknown intrinsic {name}"),
|
_ => not_supported!("unknown intrinsic {name}"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn size_align_of_unsized(
|
||||||
|
&mut self,
|
||||||
|
ty: &Ty,
|
||||||
|
metadata: Interval,
|
||||||
|
locals: &Locals,
|
||||||
|
) -> Result<(usize, usize)> {
|
||||||
|
Ok(match ty.kind(Interner) {
|
||||||
|
TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1),
|
||||||
|
TyKind::Slice(inner) => {
|
||||||
|
let len = from_bytes!(usize, metadata.get(self)?);
|
||||||
|
let (size, align) = self.size_align_of_sized(inner, locals, "slice inner type")?;
|
||||||
|
(size * len, align)
|
||||||
|
}
|
||||||
|
TyKind::Dyn(_) => self.size_align_of_sized(
|
||||||
|
self.vtable_map.ty_of_bytes(metadata.get(self)?)?,
|
||||||
|
locals,
|
||||||
|
"dyn concrete type",
|
||||||
|
)?,
|
||||||
|
TyKind::Adt(id, subst) => {
|
||||||
|
let id = id.0;
|
||||||
|
let layout = self.layout_adt(id, subst.clone())?;
|
||||||
|
let id = match id {
|
||||||
|
AdtId::StructId(s) => s,
|
||||||
|
_ => not_supported!("unsized enum or union"),
|
||||||
|
};
|
||||||
|
let field_types = &self.db.field_types(id.into());
|
||||||
|
let last_field_ty =
|
||||||
|
field_types.iter().rev().next().unwrap().1.clone().substitute(Interner, subst);
|
||||||
|
let sized_part_size =
|
||||||
|
layout.fields.offset(field_types.iter().count() - 1).bytes_usize();
|
||||||
|
let sized_part_align = layout.align.abi.bytes() as usize;
|
||||||
|
let (unsized_part_size, unsized_part_align) =
|
||||||
|
self.size_align_of_unsized(&last_field_ty, metadata, locals)?;
|
||||||
|
let align = sized_part_align.max(unsized_part_align) as isize;
|
||||||
|
let size = (sized_part_size + unsized_part_size) as isize;
|
||||||
|
// Must add any necessary padding to `size`
|
||||||
|
// (to make it a multiple of `align`) before returning it.
|
||||||
|
//
|
||||||
|
// Namely, the returned size should be, in C notation:
|
||||||
|
//
|
||||||
|
// `size + ((size & (align-1)) ? align : 0)`
|
||||||
|
//
|
||||||
|
// emulated via the semi-standard fast bit trick:
|
||||||
|
//
|
||||||
|
// `(size + (align-1)) & -align`
|
||||||
|
let size = (size + (align - 1)) & (-align);
|
||||||
|
(size as usize, align as usize)
|
||||||
|
}
|
||||||
|
_ => not_supported!("unsized type other than str, slice, struct and dyn"),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
fn exec_atomic_intrinsic(
|
fn exec_atomic_intrinsic(
|
||||||
&mut self,
|
&mut self,
|
||||||
name: &str,
|
name: &str,
|
||||||
args: &[IntervalAndTy],
|
args: &[IntervalAndTy],
|
||||||
generic_args: &Substitution,
|
generic_args: &Substitution,
|
||||||
destination: Interval,
|
destination: Interval,
|
||||||
locals: &Locals<'_>,
|
locals: &Locals,
|
||||||
_span: MirSpan,
|
_span: MirSpan,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
// We are a single threaded runtime with no UB checking and no optimization, so
|
// We are a single threaded runtime with no UB checking and no optimization, so
|
||||||
// we can implement these as normal functions.
|
// we can implement these as normal functions.
|
||||||
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
|
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
|
||||||
return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
|
return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
|
||||||
};
|
};
|
||||||
let Some(arg0) = args.get(0) else {
|
let Some(arg0) = args.get(0) else {
|
||||||
|
|
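The padding trick quoted in the `size_align_of_unsized` comment rounds a size up to the next multiple of a power-of-two alignment. A worked check:

    fn main() {
        fn round_up(size: isize, align: isize) -> isize {
            (size + (align - 1)) & -align
        }
        assert_eq!(round_up(13, 8), 16); // 13 rounds up to the next multiple of 8
        assert_eq!(round_up(16, 8), 16); // already aligned sizes are unchanged
        assert_eq!(round_up(1, 4), 4);
    }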
177	crates/hir-ty/src/mir/eval/shim/simd.rs (new file)
@@ -0,0 +1,177 @@
+//! Shim implementation for simd intrinsics
+
+use std::cmp::Ordering;
+
+use crate::TyKind;
+
+use super::*;
+
+macro_rules! from_bytes {
+    ($ty:tt, $value:expr) => {
+        ($ty::from_le_bytes(match ($value).try_into() {
+            Ok(it) => it,
+            Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
+        }))
+    };
+}
+
+macro_rules! not_supported {
+    ($it: expr) => {
+        return Err(MirEvalError::NotSupported(format!($it)))
+    };
+}
+
+impl Evaluator<'_> {
+    fn detect_simd_ty(&self, ty: &Ty) -> Result<(usize, Ty)> {
+        match ty.kind(Interner) {
+            TyKind::Adt(id, subst) => {
+                let len = match subst.as_slice(Interner).get(1).and_then(|it| it.constant(Interner))
+                {
+                    Some(len) => len,
+                    _ => {
+                        if let AdtId::StructId(id) = id.0 {
+                            let struct_data = self.db.struct_data(id);
+                            let fields = struct_data.variant_data.fields();
+                            let Some((first_field, _)) = fields.iter().next() else {
+                                not_supported!("simd type with no field");
+                            };
+                            let field_ty = self.db.field_types(id.into())[first_field]
+                                .clone()
+                                .substitute(Interner, subst);
+                            return Ok((fields.len(), field_ty));
+                        }
+                        return Err(MirEvalError::TypeError("simd type with no len param"));
+                    }
+                };
+                match try_const_usize(self.db, len) {
+                    Some(len) => {
+                        let Some(ty) = subst.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
+                            return Err(MirEvalError::TypeError("simd type with no ty param"));
+                        };
+                        Ok((len as usize, ty.clone()))
+                    }
+                    None => Err(MirEvalError::TypeError("simd type with unevaluatable len param")),
+                }
+            }
+            _ => Err(MirEvalError::TypeError("simd type which is not a struct")),
+        }
+    }
+
+    pub(super) fn exec_simd_intrinsic(
+        &mut self,
+        name: &str,
+        args: &[IntervalAndTy],
+        _generic_args: &Substitution,
+        destination: Interval,
+        _locals: &Locals,
+        _span: MirSpan,
+    ) -> Result<()> {
+        match name {
+            "and" | "or" | "xor" => {
+                let [left, right] = args else {
+                    return Err(MirEvalError::TypeError("simd bit op args are not provided"));
+                };
+                let result = left
+                    .get(self)?
+                    .iter()
+                    .zip(right.get(self)?)
+                    .map(|(&it, &y)| match name {
+                        "and" => it & y,
+                        "or" => it | y,
+                        "xor" => it ^ y,
+                        _ => unreachable!(),
+                    })
+                    .collect::<Vec<_>>();
+                destination.write_from_bytes(self, &result)
+            }
+            "eq" | "ne" | "lt" | "le" | "gt" | "ge" => {
+                let [left, right] = args else {
+                    return Err(MirEvalError::TypeError("simd args are not provided"));
+                };
+                let (len, ty) = self.detect_simd_ty(&left.ty)?;
+                let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
+                let size = left.interval.size / len;
+                let dest_size = destination.size / len;
+                let mut destination_bytes = vec![];
+                let vector = left.get(self)?.chunks(size).zip(right.get(self)?.chunks(size));
+                for (l, r) in vector {
+                    let mut result = Ordering::Equal;
+                    for (l, r) in l.iter().zip(r).rev() {
+                        let it = l.cmp(r);
+                        if it != Ordering::Equal {
+                            result = it;
+                            break;
+                        }
+                    }
+                    if is_signed {
+                        if let Some((&l, &r)) = l.iter().zip(r).rev().next() {
+                            if l != r {
+                                result = (l as i8).cmp(&(r as i8));
+                            }
+                        }
+                    }
+                    let result = match result {
+                        Ordering::Less => ["lt", "le", "ne"].contains(&name),
+                        Ordering::Equal => ["ge", "le", "eq"].contains(&name),
+                        Ordering::Greater => ["ge", "gt", "ne"].contains(&name),
+                    };
+                    let result = if result { 255 } else { 0 };
+                    destination_bytes.extend(std::iter::repeat(result).take(dest_size));
+                }
+
+                destination.write_from_bytes(self, &destination_bytes)
+            }
+            "bitmask" => {
+                let [op] = args else {
+                    return Err(MirEvalError::TypeError("simd_bitmask args are not provided"));
+                };
+                let (op_len, _) = self.detect_simd_ty(&op.ty)?;
+                let op_count = op.interval.size / op_len;
+                let mut result: u64 = 0;
+                for (i, val) in op.get(self)?.chunks(op_count).enumerate() {
+                    if !val.iter().all(|&it| it == 0) {
+                        result |= 1 << i;
+                    }
+                }
+                destination.write_from_bytes(self, &result.to_le_bytes()[0..destination.size])
+            }
+            "shuffle" => {
+                let [left, right, index] = args else {
+                    return Err(MirEvalError::TypeError("simd_shuffle args are not provided"));
+                };
+                let TyKind::Array(_, index_len) = index.ty.kind(Interner) else {
+                    return Err(MirEvalError::TypeError(
+                        "simd_shuffle index argument has non-array type",
+                    ));
+                };
+                let index_len = match try_const_usize(self.db, index_len) {
+                    Some(it) => it as usize,
+                    None => {
+                        return Err(MirEvalError::TypeError(
+                            "simd type with unevaluatable len param",
+                        ))
+                    }
+                };
+                let (left_len, _) = self.detect_simd_ty(&left.ty)?;
+                let left_size = left.interval.size / left_len;
+                let vector =
+                    left.get(self)?.chunks(left_size).chain(right.get(self)?.chunks(left_size));
+                let mut result = vec![];
+                for index in index.get(self)?.chunks(index.interval.size / index_len) {
+                    let index = from_bytes!(u32, index) as usize;
+                    let val = match vector.clone().nth(index) {
+                        Some(it) => it,
+                        None => {
+                            return Err(MirEvalError::TypeError(
+                                "out of bound access in simd shuffle",
+                            ))
+                        }
+                    };
+                    result.extend(val);
+                }
+                destination.write_from_bytes(self, &result)
+            }
+            _ => not_supported!("unknown simd intrinsic {name}"),
+        }
+    }
+}
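The `bitmask` arm above sets bit i of the result when lane i is non-zero. The same reduction over plain bytes, for reference:

    fn main() {
        let lanes = [0u8, 255, 0, 255];
        let mut mask: u64 = 0;
        for (i, &lane) in lanes.iter().enumerate() {
            if lane != 0 {
                mask |= 1u64 << i;
            }
        }
        assert_eq!(mask, 0b1010); // lanes 1 and 3 were "true"
    }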
@@ -30,7 +30,7 @@ fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalEr
         db.trait_environment(func_id.into()),
     )
     .map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?;
-    let (result, stdout, stderr) = interpret_mir(db, &body, false);
+    let (result, stdout, stderr) = interpret_mir(db, body, false);
     result?;
     Ok((stdout, stderr))
 }
@@ -613,6 +613,34 @@ fn main() {
     );
 }

+#[test]
+fn syscalls() {
+    check_pass(
+        r#"
+//- minicore: option
+
+extern "C" {
+    pub unsafe extern "C" fn syscall(num: i64, ...) -> i64;
+}
+
+const SYS_getrandom: i64 = 318;
+
+fn should_not_reach() {
+    _ // FIXME: replace this function with panic when that works
+}
+
+fn main() {
+    let mut x: i32 = 0;
+    let r = syscall(SYS_getrandom, &mut x, 4usize, 0);
+    if r != 4 {
+        should_not_reach();
+    }
+}
+
+"#,
+    )
+}
+
 #[test]
 fn posix_tls() {
     check_pass(
@@ -146,12 +146,12 @@ impl MirLowerError {
                     ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter)?,
                 }
             }
-            MirLowerError::MissingFunctionDefinition(owner, x) => {
+            MirLowerError::MissingFunctionDefinition(owner, it) => {
                 let body = db.body(*owner);
                 writeln!(
                     f,
                     "Missing function definition for {}",
-                    body.pretty_print_expr(db.upcast(), *owner, *x)
+                    body.pretty_print_expr(db.upcast(), *owner, *it)
                 )?;
             }
             MirLowerError::TypeMismatch(e) => {
@@ -202,15 +202,15 @@ impl MirLowerError {
 }
 
 macro_rules! not_supported {
-    ($x: expr) => {
-        return Err(MirLowerError::NotSupported(format!($x)))
+    ($it: expr) => {
+        return Err(MirLowerError::NotSupported(format!($it)))
     };
 }
 
 macro_rules! implementation_error {
-    ($x: expr) => {{
-        ::stdx::never!("MIR lower implementation bug: {}", format!($x));
-        return Err(MirLowerError::ImplementationError(format!($x)));
+    ($it: expr) => {{
+        ::stdx::never!("MIR lower implementation bug: {}", format!($it));
+        return Err(MirLowerError::ImplementationError(format!($it)));
     }};
 }
 
@@ -310,14 +310,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
                     self.lower_expr_to_place_with_adjust(expr_id, temp.into(), current, rest)
                 }
                 Adjust::Deref(_) => {
-                    let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, adjustments)? else {
+                    let Some((p, current)) =
+                        self.lower_expr_as_place_with_adjust(current, expr_id, true, adjustments)?
+                    else {
                         return Ok(None);
                     };
                     self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
                     Ok(Some(current))
                 }
                 Adjust::Borrow(AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) => {
-                    let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else {
+                    let Some((p, current)) =
+                        self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
+                    else {
                         return Ok(None);
                     };
                     let bk = BorrowKind::from_chalk(*m);
@@ -325,7 +329,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
                     Ok(Some(current))
                 }
                 Adjust::Pointer(cast) => {
-                    let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else {
+                    let Some((p, current)) =
+                        self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
+                    else {
                         return Ok(None);
                     };
                     self.push_assignment(
@ -373,30 +379,32 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(MirLowerError::IncompleteExpr)
|
Err(MirLowerError::IncompleteExpr)
|
||||||
},
|
}
|
||||||
Expr::Path(p) => {
|
Expr::Path(p) => {
|
||||||
let pr = if let Some((assoc, subst)) = self
|
let pr =
|
||||||
.infer
|
if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) {
|
||||||
.assoc_resolutions_for_expr(expr_id)
|
|
||||||
{
|
|
||||||
match assoc {
|
match assoc {
|
||||||
hir_def::AssocItemId::ConstId(c) => {
|
hir_def::AssocItemId::ConstId(c) => {
|
||||||
self.lower_const(c.into(), current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
|
self.lower_const(
|
||||||
return Ok(Some(current))
|
c.into(),
|
||||||
},
|
current,
|
||||||
|
place,
|
||||||
|
subst,
|
||||||
|
expr_id.into(),
|
||||||
|
self.expr_ty_without_adjust(expr_id),
|
||||||
|
)?;
|
||||||
|
return Ok(Some(current));
|
||||||
|
}
|
||||||
hir_def::AssocItemId::FunctionId(_) => {
|
hir_def::AssocItemId::FunctionId(_) => {
|
||||||
// FnDefs are zero sized, no action is needed.
|
// FnDefs are zero sized, no action is needed.
|
||||||
return Ok(Some(current))
|
return Ok(Some(current));
|
||||||
}
|
}
|
||||||
hir_def::AssocItemId::TypeAliasId(_) => {
|
hir_def::AssocItemId::TypeAliasId(_) => {
|
||||||
// FIXME: If it is unreachable, use proper error instead of `not_supported`.
|
// FIXME: If it is unreachable, use proper error instead of `not_supported`.
|
||||||
not_supported!("associated functions and types")
|
not_supported!("associated functions and types")
|
||||||
},
|
|
||||||
}
|
}
|
||||||
} else if let Some(variant) = self
|
}
|
||||||
.infer
|
} else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) {
|
||||||
.variant_resolution_for_expr(expr_id)
|
|
||||||
{
|
|
||||||
match variant {
|
match variant {
|
||||||
VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
|
VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
|
||||||
VariantId::StructId(s) => ValueNs::StructId(s),
|
VariantId::StructId(s) => ValueNs::StructId(s),
|
||||||
|
@ -411,7 +419,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
};
|
};
|
||||||
match pr {
|
match pr {
|
||||||
ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => {
|
ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => {
|
||||||
let Some((temp, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, false)? else {
|
let Some((temp, current)) =
|
||||||
|
self.lower_expr_as_place_without_adjust(current, expr_id, false)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
self.push_assignment(
|
self.push_assignment(
|
||||||
|
@ -423,11 +433,19 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
}
|
}
|
||||||
ValueNs::ConstId(const_id) => {
|
ValueNs::ConstId(const_id) => {
|
||||||
self.lower_const(const_id.into(), current, place, Substitution::empty(Interner), expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
|
self.lower_const(
|
||||||
|
const_id.into(),
|
||||||
|
current,
|
||||||
|
place,
|
||||||
|
Substitution::empty(Interner),
|
||||||
|
expr_id.into(),
|
||||||
|
self.expr_ty_without_adjust(expr_id),
|
||||||
|
)?;
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
}
|
}
|
||||||
ValueNs::EnumVariantId(variant_id) => {
|
ValueNs::EnumVariantId(variant_id) => {
|
||||||
let variant_data = &self.db.enum_data(variant_id.parent).variants[variant_id.local_id];
|
let variant_data =
|
||||||
|
&self.db.enum_data(variant_id.parent).variants[variant_id.local_id];
|
||||||
if variant_data.variant_data.kind() == StructKind::Unit {
|
if variant_data.variant_data.kind() == StructKind::Unit {
|
||||||
let ty = self.infer.type_of_expr[expr_id].clone();
|
let ty = self.infer.type_of_expr[expr_id].clone();
|
||||||
current = self.lower_enum_variant(
|
current = self.lower_enum_variant(
|
||||||
|
@ -472,13 +490,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
// It's probably a unit struct or a zero sized function, so no action is needed.
|
// It's probably a unit struct or a zero sized function, so no action is needed.
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
}
|
}
|
||||||
x => {
|
it => {
|
||||||
not_supported!("unknown name {x:?} in value name space");
|
not_supported!("unknown name {it:?} in value name space");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Expr::If { condition, then_branch, else_branch } => {
|
Expr::If { condition, then_branch, else_branch } => {
|
||||||
let Some((discr, current)) = self.lower_expr_to_some_operand(*condition, current)? else {
|
let Some((discr, current)) =
|
||||||
|
self.lower_expr_to_some_operand(*condition, current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let start_of_then = self.new_basic_block();
|
let start_of_then = self.new_basic_block();
|
||||||
|
@ -501,15 +521,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into()))
|
Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into()))
|
||||||
}
|
}
|
||||||
Expr::Let { pat, expr } => {
|
Expr::Let { pat, expr } => {
|
||||||
let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)? else {
|
let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let (then_target, else_target) = self.pattern_match(
|
let (then_target, else_target) =
|
||||||
current,
|
self.pattern_match(current, None, cond_place, *pat)?;
|
||||||
None,
|
|
||||||
cond_place,
|
|
||||||
*pat,
|
|
||||||
)?;
|
|
||||||
self.write_bytes_to_place(
|
self.write_bytes_to_place(
|
||||||
then_target,
|
then_target,
|
||||||
place.clone(),
|
place.clone(),
|
||||||
|
@ -533,18 +550,31 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
}
|
}
|
||||||
Expr::Block { id: _, statements, tail, label } => {
|
Expr::Block { id: _, statements, tail, label } => {
|
||||||
if let Some(label) = label {
|
if let Some(label) = label {
|
||||||
self.lower_loop(current, place.clone(), Some(*label), expr_id.into(), |this, begin| {
|
self.lower_loop(
|
||||||
if let Some(current) = this.lower_block_to_place(statements, begin, *tail, place, expr_id.into())? {
|
current,
|
||||||
|
place.clone(),
|
||||||
|
Some(*label),
|
||||||
|
expr_id.into(),
|
||||||
|
|this, begin| {
|
||||||
|
if let Some(current) = this.lower_block_to_place(
|
||||||
|
statements,
|
||||||
|
begin,
|
||||||
|
*tail,
|
||||||
|
place,
|
||||||
|
expr_id.into(),
|
||||||
|
)? {
|
||||||
let end = this.current_loop_end()?;
|
let end = this.current_loop_end()?;
|
||||||
this.set_goto(current, end, expr_id.into());
|
this.set_goto(current, end, expr_id.into());
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
})
|
},
|
||||||
|
)
|
||||||
} else {
|
} else {
|
||||||
self.lower_block_to_place(statements, current, *tail, place, expr_id.into())
|
self.lower_block_to_place(statements, current, *tail, place, expr_id.into())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Expr::Loop { body, label } => self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
|
Expr::Loop { body, label } => {
|
||||||
|
self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
|
||||||
let scope = this.push_drop_scope();
|
let scope = this.push_drop_scope();
|
||||||
if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? {
|
if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? {
|
||||||
current = scope.pop_and_drop(this, current);
|
current = scope.pop_and_drop(this, current);
|
||||||
|
@ -553,11 +583,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
scope.pop_assume_dropped(this);
|
scope.pop_assume_dropped(this);
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}),
|
})
|
||||||
|
}
|
||||||
Expr::While { condition, body, label } => {
|
Expr::While { condition, body, label } => {
|
||||||
self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
|
self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
|
||||||
let scope = this.push_drop_scope();
|
let scope = this.push_drop_scope();
|
||||||
let Some((discr, to_switch)) = this.lower_expr_to_some_operand(*condition, begin)? else {
|
let Some((discr, to_switch)) =
|
||||||
|
this.lower_expr_to_some_operand(*condition, begin)?
|
||||||
|
else {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
};
|
};
|
||||||
let fail_cond = this.new_basic_block();
|
let fail_cond = this.new_basic_block();
|
||||||
|
@ -583,8 +616,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
Expr::Call { callee, args, .. } => {
|
Expr::Call { callee, args, .. } => {
|
||||||
if let Some((func_id, generic_args)) =
|
if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) {
|
||||||
self.infer.method_resolution(expr_id) {
|
|
||||||
let ty = chalk_ir::TyKind::FnDef(
|
let ty = chalk_ir::TyKind::FnDef(
|
||||||
CallableDefId::FunctionId(func_id).to_chalk(self.db),
|
CallableDefId::FunctionId(func_id).to_chalk(self.db),
|
||||||
generic_args,
|
generic_args,
|
||||||
|
@ -604,21 +636,43 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
match &callee_ty.data(Interner).kind {
|
match &callee_ty.data(Interner).kind {
|
||||||
chalk_ir::TyKind::FnDef(..) => {
|
chalk_ir::TyKind::FnDef(..) => {
|
||||||
let func = Operand::from_bytes(vec![], callee_ty.clone());
|
let func = Operand::from_bytes(vec![], callee_ty.clone());
|
||||||
self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into())
|
self.lower_call_and_args(
|
||||||
|
func,
|
||||||
|
args.iter().copied(),
|
||||||
|
place,
|
||||||
|
current,
|
||||||
|
self.is_uninhabited(expr_id),
|
||||||
|
expr_id.into(),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
chalk_ir::TyKind::Function(_) => {
|
chalk_ir::TyKind::Function(_) => {
|
||||||
let Some((func, current)) = self.lower_expr_to_some_operand(*callee, current)? else {
|
let Some((func, current)) =
|
||||||
|
self.lower_expr_to_some_operand(*callee, current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into())
|
self.lower_call_and_args(
|
||||||
|
func,
|
||||||
|
args.iter().copied(),
|
||||||
|
place,
|
||||||
|
current,
|
||||||
|
self.is_uninhabited(expr_id),
|
||||||
|
expr_id.into(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
TyKind::Error => {
|
||||||
|
return Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id))
|
||||||
}
|
}
|
||||||
TyKind::Error => return Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id)),
|
|
||||||
_ => return Err(MirLowerError::TypeError("function call on bad type")),
|
_ => return Err(MirLowerError::TypeError("function call on bad type")),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Expr::MethodCall { receiver, args, method_name, .. } => {
|
Expr::MethodCall { receiver, args, method_name, .. } => {
|
||||||
let (func_id, generic_args) =
|
let (func_id, generic_args) =
|
||||||
self.infer.method_resolution(expr_id).ok_or_else(|| MirLowerError::UnresolvedMethod(method_name.display(self.db.upcast()).to_string()))?;
|
self.infer.method_resolution(expr_id).ok_or_else(|| {
|
||||||
|
MirLowerError::UnresolvedMethod(
|
||||||
|
method_name.display(self.db.upcast()).to_string(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
let func = Operand::from_fn(self.db, func_id, generic_args);
|
let func = Operand::from_fn(self.db, func_id, generic_args);
|
||||||
self.lower_call_and_args(
|
self.lower_call_and_args(
|
||||||
func,
|
func,
|
||||||
|
@ -630,23 +684,27 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
Expr::Match { expr, arms } => {
|
Expr::Match { expr, arms } => {
|
||||||
let Some((cond_place, mut current)) = self.lower_expr_as_place(current, *expr, true)?
|
let Some((cond_place, mut current)) =
|
||||||
|
self.lower_expr_as_place(current, *expr, true)?
|
||||||
else {
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let mut end = None;
|
let mut end = None;
|
||||||
for MatchArm { pat, guard, expr } in arms.iter() {
|
for MatchArm { pat, guard, expr } in arms.iter() {
|
||||||
let (then, mut otherwise) = self.pattern_match(
|
let (then, mut otherwise) =
|
||||||
current,
|
self.pattern_match(current, None, cond_place.clone(), *pat)?;
|
||||||
None,
|
|
||||||
cond_place.clone(),
|
|
||||||
*pat,
|
|
||||||
)?;
|
|
||||||
let then = if let &Some(guard) = guard {
|
let then = if let &Some(guard) = guard {
|
||||||
let next = self.new_basic_block();
|
let next = self.new_basic_block();
|
||||||
let o = otherwise.get_or_insert_with(|| self.new_basic_block());
|
let o = otherwise.get_or_insert_with(|| self.new_basic_block());
|
||||||
if let Some((discr, c)) = self.lower_expr_to_some_operand(guard, then)? {
|
if let Some((discr, c)) = self.lower_expr_to_some_operand(guard, then)? {
|
||||||
self.set_terminator(c, TerminatorKind::SwitchInt { discr, targets: SwitchTargets::static_if(1, next, *o) }, expr_id.into());
|
self.set_terminator(
|
||||||
|
c,
|
||||||
|
TerminatorKind::SwitchInt {
|
||||||
|
discr,
|
||||||
|
targets: SwitchTargets::static_if(1, next, *o),
|
||||||
|
},
|
||||||
|
expr_id.into(),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
next
|
next
|
||||||
} else {
|
} else {
|
||||||
|
@ -672,33 +730,53 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
}
|
}
|
||||||
Expr::Continue { label } => {
|
Expr::Continue { label } => {
|
||||||
let loop_data = match label {
|
let loop_data = match label {
|
||||||
Some(l) => self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?,
|
Some(l) => {
|
||||||
None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::ContinueWithoutLoop)?,
|
self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?
|
||||||
|
}
|
||||||
|
None => self
|
||||||
|
.current_loop_blocks
|
||||||
|
.as_ref()
|
||||||
|
.ok_or(MirLowerError::ContinueWithoutLoop)?,
|
||||||
};
|
};
|
||||||
let begin = loop_data.begin;
|
let begin = loop_data.begin;
|
||||||
current = self.drop_until_scope(loop_data.drop_scope_index, current);
|
current = self.drop_until_scope(loop_data.drop_scope_index, current);
|
||||||
self.set_goto(current, begin, expr_id.into());
|
self.set_goto(current, begin, expr_id.into());
|
||||||
Ok(None)
|
Ok(None)
|
||||||
},
|
}
|
||||||
&Expr::Break { expr, label } => {
|
&Expr::Break { expr, label } => {
|
||||||
if let Some(expr) = expr {
|
if let Some(expr) = expr {
|
||||||
let loop_data = match label {
|
let loop_data = match label {
|
||||||
Some(l) => self.labeled_loop_blocks.get(&l).ok_or(MirLowerError::UnresolvedLabel)?,
|
Some(l) => self
|
||||||
None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::BreakWithoutLoop)?,
|
.labeled_loop_blocks
|
||||||
|
.get(&l)
|
||||||
|
.ok_or(MirLowerError::UnresolvedLabel)?,
|
||||||
|
None => self
|
||||||
|
.current_loop_blocks
|
||||||
|
.as_ref()
|
||||||
|
.ok_or(MirLowerError::BreakWithoutLoop)?,
|
||||||
};
|
};
|
||||||
let Some(c) = self.lower_expr_to_place(expr, loop_data.place.clone(), current)? else {
|
let Some(c) =
|
||||||
|
self.lower_expr_to_place(expr, loop_data.place.clone(), current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
current = c;
|
current = c;
|
||||||
}
|
}
|
||||||
let (end, drop_scope) = match label {
|
let (end, drop_scope) = match label {
|
||||||
Some(l) => {
|
Some(l) => {
|
||||||
let loop_blocks = self.labeled_loop_blocks.get(&l).ok_or(MirLowerError::UnresolvedLabel)?;
|
let loop_blocks = self
|
||||||
(loop_blocks.end.expect("We always generate end for labeled loops"), loop_blocks.drop_scope_index)
|
.labeled_loop_blocks
|
||||||
},
|
.get(&l)
|
||||||
None => {
|
.ok_or(MirLowerError::UnresolvedLabel)?;
|
||||||
(self.current_loop_end()?, self.current_loop_blocks.as_ref().unwrap().drop_scope_index)
|
(
|
||||||
},
|
loop_blocks.end.expect("We always generate end for labeled loops"),
|
||||||
|
loop_blocks.drop_scope_index,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
None => (
|
||||||
|
self.current_loop_end()?,
|
||||||
|
self.current_loop_blocks.as_ref().unwrap().drop_scope_index,
|
||||||
|
),
|
||||||
};
|
};
|
||||||
current = self.drop_until_scope(drop_scope, current);
|
current = self.drop_until_scope(drop_scope, current);
|
||||||
self.set_goto(current, end, expr_id.into());
|
self.set_goto(current, end, expr_id.into());
|
||||||
|
@ -706,7 +784,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
}
|
}
|
||||||
Expr::Return { expr } => {
|
Expr::Return { expr } => {
|
||||||
if let Some(expr) = expr {
|
if let Some(expr) = expr {
|
||||||
if let Some(c) = self.lower_expr_to_place(*expr, return_slot().into(), current)? {
|
if let Some(c) =
|
||||||
|
self.lower_expr_to_place(*expr, return_slot().into(), current)?
|
||||||
|
{
|
||||||
current = c;
|
current = c;
|
||||||
} else {
|
} else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
|
@ -719,19 +799,17 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
Expr::Yield { .. } => not_supported!("yield"),
|
Expr::Yield { .. } => not_supported!("yield"),
|
||||||
Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => {
|
Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => {
|
||||||
let spread_place = match spread {
|
let spread_place = match spread {
|
||||||
&Some(x) => {
|
&Some(it) => {
|
||||||
let Some((p, c)) = self.lower_expr_as_place(current, x, true)? else {
|
let Some((p, c)) = self.lower_expr_as_place(current, it, true)? else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
current = c;
|
current = c;
|
||||||
Some(p)
|
Some(p)
|
||||||
},
|
}
|
||||||
None => None,
|
None => None,
|
||||||
};
|
};
|
||||||
let variant_id = self
|
let variant_id =
|
||||||
.infer
|
self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
|
||||||
.variant_resolution_for_expr(expr_id)
|
|
||||||
.ok_or_else(|| match path {
|
|
||||||
Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()),
|
Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()),
|
||||||
None => MirLowerError::RecordLiteralWithoutPath,
|
None => MirLowerError::RecordLiteralWithoutPath,
|
||||||
})?;
|
})?;
|
||||||
|
@ -746,7 +824,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
for RecordLitField { name, expr } in fields.iter() {
|
for RecordLitField { name, expr } in fields.iter() {
|
||||||
let field_id =
|
let field_id =
|
||||||
variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
|
variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
|
||||||
let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)? else {
|
let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
current = c;
|
current = c;
|
||||||
|
@ -758,18 +837,23 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
Rvalue::Aggregate(
|
Rvalue::Aggregate(
|
||||||
AggregateKind::Adt(variant_id, subst),
|
AggregateKind::Adt(variant_id, subst),
|
||||||
match spread_place {
|
match spread_place {
|
||||||
Some(sp) => operands.into_iter().enumerate().map(|(i, x)| {
|
Some(sp) => operands
|
||||||
match x {
|
.into_iter()
|
||||||
Some(x) => x,
|
.enumerate()
|
||||||
|
.map(|(i, it)| match it {
|
||||||
|
Some(it) => it,
|
||||||
None => {
|
None => {
|
||||||
let p = sp.project(ProjectionElem::Field(FieldId {
|
let p =
|
||||||
|
sp.project(ProjectionElem::Field(FieldId {
|
||||||
parent: variant_id,
|
parent: variant_id,
|
||||||
local_id: LocalFieldId::from_raw(RawIdx::from(i as u32)),
|
local_id: LocalFieldId::from_raw(
|
||||||
|
RawIdx::from(i as u32),
|
||||||
|
),
|
||||||
}));
|
}));
|
||||||
Operand::Copy(p)
|
Operand::Copy(p)
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}).collect(),
|
})
|
||||||
|
.collect(),
|
||||||
None => operands.into_iter().collect::<Option<_>>().ok_or(
|
None => operands.into_iter().collect::<Option<_>>().ok_or(
|
||||||
MirLowerError::TypeError("missing field in record literal"),
|
MirLowerError::TypeError("missing field in record literal"),
|
||||||
)?,
|
)?,
|
||||||
|
@ -785,7 +869,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
};
|
};
|
||||||
let local_id =
|
let local_id =
|
||||||
variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
|
variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
|
||||||
let place = place.project(PlaceElem::Field(FieldId { parent: union_id.into(), local_id }));
|
let place = place.project(PlaceElem::Field(FieldId {
|
||||||
|
parent: union_id.into(),
|
||||||
|
local_id,
|
||||||
|
}));
|
||||||
self.lower_expr_to_place(*expr, place, current)
|
self.lower_expr_to_place(*expr, place, current)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -795,11 +882,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
Expr::Async { .. } => not_supported!("async block"),
|
Expr::Async { .. } => not_supported!("async block"),
|
||||||
&Expr::Const(id) => {
|
&Expr::Const(id) => {
|
||||||
let subst = self.placeholder_subst();
|
let subst = self.placeholder_subst();
|
||||||
self.lower_const(id.into(), current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
|
self.lower_const(
|
||||||
|
id.into(),
|
||||||
|
current,
|
||||||
|
place,
|
||||||
|
subst,
|
||||||
|
expr_id.into(),
|
||||||
|
self.expr_ty_without_adjust(expr_id),
|
||||||
|
)?;
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
},
|
}
|
||||||
Expr::Cast { expr, type_ref: _ } => {
|
Expr::Cast { expr, type_ref: _ } => {
|
||||||
let Some((x, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
|
let Some((it, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let source_ty = self.infer[*expr].clone();
|
let source_ty = self.infer[*expr].clone();
|
||||||
|
@ -807,7 +901,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
self.push_assignment(
|
self.push_assignment(
|
||||||
current,
|
current,
|
||||||
place,
|
place,
|
||||||
Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, x, target_ty),
|
Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, it, target_ty),
|
||||||
expr_id.into(),
|
expr_id.into(),
|
||||||
);
|
);
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
|
@ -822,23 +916,37 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
}
|
}
|
||||||
Expr::Box { expr } => {
|
Expr::Box { expr } => {
|
||||||
let ty = self.expr_ty_after_adjustments(*expr);
|
let ty = self.expr_ty_after_adjustments(*expr);
|
||||||
self.push_assignment(current, place.clone(), Rvalue::ShallowInitBoxWithAlloc(ty), expr_id.into());
|
self.push_assignment(
|
||||||
let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
|
current,
|
||||||
|
place.clone(),
|
||||||
|
Rvalue::ShallowInitBoxWithAlloc(ty),
|
||||||
|
expr_id.into(),
|
||||||
|
);
|
||||||
|
let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let p = place.project(ProjectionElem::Deref);
|
let p = place.project(ProjectionElem::Deref);
|
||||||
self.push_assignment(current, p, operand.into(), expr_id.into());
|
self.push_assignment(current, p, operand.into(), expr_id.into());
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
},
|
}
|
||||||
Expr::Field { .. } | Expr::Index { .. } | Expr::UnaryOp { op: hir_def::hir::UnaryOp::Deref, .. } => {
|
Expr::Field { .. }
|
||||||
let Some((p, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, true)? else {
|
| Expr::Index { .. }
|
||||||
|
| Expr::UnaryOp { op: hir_def::hir::UnaryOp::Deref, .. } => {
|
||||||
|
let Some((p, current)) =
|
||||||
|
self.lower_expr_as_place_without_adjust(current, expr_id, true)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
|
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
}
|
}
|
||||||
Expr::UnaryOp { expr, op: op @ (hir_def::hir::UnaryOp::Not | hir_def::hir::UnaryOp::Neg) } => {
|
Expr::UnaryOp {
|
||||||
let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
|
expr,
|
||||||
|
op: op @ (hir_def::hir::UnaryOp::Not | hir_def::hir::UnaryOp::Neg),
|
||||||
|
} => {
|
||||||
|
let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let operation = match op {
|
let operation = match op {
|
||||||
|
@ -853,7 +961,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
expr_id.into(),
|
expr_id.into(),
|
||||||
);
|
);
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
},
|
}
|
||||||
Expr::BinaryOp { lhs, rhs, op } => {
|
Expr::BinaryOp { lhs, rhs, op } => {
|
||||||
let op = op.ok_or(MirLowerError::IncompleteExpr)?;
|
let op = op.ok_or(MirLowerError::IncompleteExpr)?;
|
||||||
let is_builtin = 'b: {
|
let is_builtin = 'b: {
|
||||||
|
@ -868,9 +976,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
}
|
}
|
||||||
let builtin_inequal_impls = matches!(
|
let builtin_inequal_impls = matches!(
|
||||||
op,
|
op,
|
||||||
BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) | BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) }
|
BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr)
|
||||||
|
| BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) }
|
||||||
);
|
);
|
||||||
lhs_ty.is_scalar() && rhs_ty.is_scalar() && (lhs_ty == rhs_ty || builtin_inequal_impls)
|
lhs_ty.is_scalar()
|
||||||
|
&& rhs_ty.is_scalar()
|
||||||
|
&& (lhs_ty == rhs_ty || builtin_inequal_impls)
|
||||||
};
|
};
|
||||||
if !is_builtin {
|
if !is_builtin {
|
||||||
if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) {
|
if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) {
|
||||||
|
@ -892,18 +1003,26 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
.infer
|
.infer
|
||||||
.expr_adjustments
|
.expr_adjustments
|
||||||
.get(lhs)
|
.get(lhs)
|
||||||
.and_then(|x| x.split_last())
|
.and_then(|it| it.split_last())
|
||||||
.map(|x| x.1)
|
.map(|it| it.1)
|
||||||
.ok_or(MirLowerError::TypeError("adjustment of binary op was missing"))?;
|
.ok_or(MirLowerError::TypeError(
|
||||||
|
"adjustment of binary op was missing",
|
||||||
|
))?;
|
||||||
let Some((lhs_place, current)) =
|
let Some((lhs_place, current)) =
|
||||||
self.lower_expr_as_place_with_adjust(current, *lhs, false, adjusts)?
|
self.lower_expr_as_place_with_adjust(current, *lhs, false, adjusts)?
|
||||||
else {
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
|
let Some((rhs_op, current)) =
|
||||||
|
self.lower_expr_to_some_operand(*rhs, current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let r_value = Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place.clone()), rhs_op);
|
let r_value = Rvalue::CheckedBinaryOp(
|
||||||
|
op.into(),
|
||||||
|
Operand::Copy(lhs_place.clone()),
|
||||||
|
rhs_op,
|
||||||
|
);
|
||||||
self.push_assignment(current, lhs_place, r_value, expr_id.into());
|
self.push_assignment(current, lhs_place, r_value, expr_id.into());
|
||||||
return Ok(Some(current));
|
return Ok(Some(current));
|
||||||
} else {
|
} else {
|
||||||
|
@ -912,14 +1031,17 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
else {
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
|
let Some((rhs_op, current)) =
|
||||||
|
self.lower_expr_to_some_operand(*rhs, current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into());
|
self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into());
|
||||||
return Ok(Some(current));
|
return Ok(Some(current));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? else {
|
let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
if let hir_def::hir::BinaryOp::LogicOp(op) = op {
|
if let hir_def::hir::BinaryOp::LogicOp(op) = op {
|
||||||
|
@ -928,22 +1050,31 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
syntax::ast::LogicOp::Or => 1,
|
syntax::ast::LogicOp::Or => 1,
|
||||||
};
|
};
|
||||||
let start_of_then = self.new_basic_block();
|
let start_of_then = self.new_basic_block();
|
||||||
self.push_assignment(start_of_then, place.clone(), lhs_op.clone().into(), expr_id.into());
|
self.push_assignment(
|
||||||
|
start_of_then,
|
||||||
|
place.clone(),
|
||||||
|
lhs_op.clone().into(),
|
||||||
|
expr_id.into(),
|
||||||
|
);
|
||||||
let end_of_then = Some(start_of_then);
|
let end_of_then = Some(start_of_then);
|
||||||
let start_of_else = self.new_basic_block();
|
let start_of_else = self.new_basic_block();
|
||||||
let end_of_else =
|
let end_of_else = self.lower_expr_to_place(*rhs, place, start_of_else)?;
|
||||||
self.lower_expr_to_place(*rhs, place, start_of_else)?;
|
|
||||||
self.set_terminator(
|
self.set_terminator(
|
||||||
current,
|
current,
|
||||||
TerminatorKind::SwitchInt {
|
TerminatorKind::SwitchInt {
|
||||||
discr: lhs_op,
|
discr: lhs_op,
|
||||||
targets: SwitchTargets::static_if(value_to_short, start_of_then, start_of_else),
|
targets: SwitchTargets::static_if(
|
||||||
|
value_to_short,
|
||||||
|
start_of_then,
|
||||||
|
start_of_else,
|
||||||
|
),
|
||||||
},
|
},
|
||||||
expr_id.into(),
|
expr_id.into(),
|
||||||
);
|
);
|
||||||
return Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into()));
|
return Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into()));
|
||||||
}
|
}
|
||||||
let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
|
let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
self.push_assignment(
|
self.push_assignment(
|
||||||
|
@ -976,15 +1107,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
};
|
};
|
||||||
let mut lp = None;
|
let mut lp = None;
|
||||||
let mut rp = None;
|
let mut rp = None;
|
||||||
if let Some(x) = lhs {
|
if let Some(it) = lhs {
|
||||||
let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else {
|
let Some((o, c)) = self.lower_expr_to_some_operand(it, current)? else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
lp = Some(o);
|
lp = Some(o);
|
||||||
current = c;
|
current = c;
|
||||||
}
|
}
|
||||||
if let Some(x) = rhs {
|
if let Some(it) = rhs {
|
||||||
let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else {
|
let Some((o, c)) = self.lower_expr_to_some_operand(it, current)? else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
rp = Some(o);
|
rp = Some(o);
|
||||||
|
@ -995,20 +1126,28 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
place,
|
place,
|
||||||
Rvalue::Aggregate(
|
Rvalue::Aggregate(
|
||||||
AggregateKind::Adt(st.into(), subst.clone()),
|
AggregateKind::Adt(st.into(), subst.clone()),
|
||||||
self.db.struct_data(st).variant_data.fields().iter().map(|x| {
|
self.db
|
||||||
let o = match x.1.name.as_str() {
|
.struct_data(st)
|
||||||
|
.variant_data
|
||||||
|
.fields()
|
||||||
|
.iter()
|
||||||
|
.map(|it| {
|
||||||
|
let o = match it.1.name.as_str() {
|
||||||
Some("start") => lp.take(),
|
Some("start") => lp.take(),
|
||||||
Some("end") => rp.take(),
|
Some("end") => rp.take(),
|
||||||
Some("exhausted") => Some(Operand::from_bytes(vec![0], TyBuilder::bool())),
|
Some("exhausted") => {
|
||||||
|
Some(Operand::from_bytes(vec![0], TyBuilder::bool()))
|
||||||
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
};
|
};
|
||||||
o.ok_or(MirLowerError::UnresolvedField)
|
o.ok_or(MirLowerError::UnresolvedField)
|
||||||
}).collect::<Result<_>>()?,
|
})
|
||||||
|
.collect::<Result<_>>()?,
|
||||||
),
|
),
|
||||||
expr_id.into(),
|
expr_id.into(),
|
||||||
);
|
);
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
},
|
}
|
||||||
Expr::Closure { .. } => {
|
Expr::Closure { .. } => {
|
||||||
let ty = self.expr_ty_without_adjust(expr_id);
|
let ty = self.expr_ty_without_adjust(expr_id);
|
||||||
let TyKind::Closure(id, _) = ty.kind(Interner) else {
|
let TyKind::Closure(id, _) = ty.kind(Interner) else {
|
||||||
|
@ -1020,22 +1159,33 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
for capture in captures.iter() {
|
for capture in captures.iter() {
|
||||||
let p = Place {
|
let p = Place {
|
||||||
local: self.binding_local(capture.place.local)?,
|
local: self.binding_local(capture.place.local)?,
|
||||||
projection: capture.place.projections.clone().into_iter().map(|x| {
|
projection: capture
|
||||||
match x {
|
.place
|
||||||
|
.projections
|
||||||
|
.clone()
|
||||||
|
.into_iter()
|
||||||
|
.map(|it| match it {
|
||||||
ProjectionElem::Deref => ProjectionElem::Deref,
|
ProjectionElem::Deref => ProjectionElem::Deref,
|
||||||
ProjectionElem::Field(x) => ProjectionElem::Field(x),
|
ProjectionElem::Field(it) => ProjectionElem::Field(it),
|
||||||
ProjectionElem::TupleOrClosureField(x) => ProjectionElem::TupleOrClosureField(x),
|
ProjectionElem::TupleOrClosureField(it) => {
|
||||||
ProjectionElem::ConstantIndex { offset, from_end } => ProjectionElem::ConstantIndex { offset, from_end },
|
ProjectionElem::TupleOrClosureField(it)
|
||||||
ProjectionElem::Subslice { from, to } => ProjectionElem::Subslice { from, to },
|
|
||||||
ProjectionElem::OpaqueCast(x) => ProjectionElem::OpaqueCast(x),
|
|
||||||
ProjectionElem::Index(x) => match x { },
|
|
||||||
}
|
}
|
||||||
}).collect(),
|
ProjectionElem::ConstantIndex { offset, from_end } => {
|
||||||
|
ProjectionElem::ConstantIndex { offset, from_end }
|
||||||
|
}
|
||||||
|
ProjectionElem::Subslice { from, to } => {
|
||||||
|
ProjectionElem::Subslice { from, to }
|
||||||
|
}
|
||||||
|
ProjectionElem::OpaqueCast(it) => ProjectionElem::OpaqueCast(it),
|
||||||
|
ProjectionElem::Index(it) => match it {},
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
};
|
};
|
||||||
match &capture.kind {
|
match &capture.kind {
|
||||||
CaptureKind::ByRef(bk) => {
|
CaptureKind::ByRef(bk) => {
|
||||||
let placeholder_subst = self.placeholder_subst();
|
let placeholder_subst = self.placeholder_subst();
|
||||||
let tmp_ty = capture.ty.clone().substitute(Interner, &placeholder_subst);
|
let tmp_ty =
|
||||||
|
capture.ty.clone().substitute(Interner, &placeholder_subst);
|
||||||
let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into();
|
let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into();
|
||||||
self.push_assignment(
|
self.push_assignment(
|
||||||
current,
|
current,
|
||||||
|
@ -1044,7 +1194,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
capture.span,
|
capture.span,
|
||||||
);
|
);
|
||||||
operands.push(Operand::Move(tmp));
|
operands.push(Operand::Move(tmp));
|
||||||
},
|
}
|
||||||
CaptureKind::ByValue => operands.push(Operand::Move(p)),
|
CaptureKind::ByValue => operands.push(Operand::Move(p)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1055,12 +1205,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
expr_id.into(),
|
expr_id.into(),
|
||||||
);
|
);
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
},
|
}
|
||||||
Expr::Tuple { exprs, is_assignee_expr: _ } => {
|
Expr::Tuple { exprs, is_assignee_expr: _ } => {
|
||||||
let Some(values) = exprs
|
let Some(values) = exprs
|
||||||
.iter()
|
.iter()
|
||||||
.map(|x| {
|
.map(|it| {
|
||||||
let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
|
let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
current = c;
|
current = c;
|
||||||
|
@ -1089,8 +1239,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
};
|
};
|
||||||
let Some(values) = elements
|
let Some(values) = elements
|
||||||
.iter()
|
.iter()
|
||||||
.map(|x| {
|
.map(|it| {
|
||||||
let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
|
let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
current = c;
|
current = c;
|
||||||
|
@ -1100,15 +1250,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
else {
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let r = Rvalue::Aggregate(
|
let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty), values);
|
||||||
AggregateKind::Array(elem_ty),
|
|
||||||
values,
|
|
||||||
);
|
|
||||||
self.push_assignment(current, place, r, expr_id.into());
|
self.push_assignment(current, place, r, expr_id.into());
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
}
|
}
|
||||||
Array::Repeat { initializer, .. } => {
|
Array::Repeat { initializer, .. } => {
|
||||||
let Some((init, current)) = self.lower_expr_to_some_operand(*initializer, current)? else {
|
let Some((init, current)) =
|
||||||
|
self.lower_expr_to_some_operand(*initializer, current)?
|
||||||
|
else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let len = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind {
|
let len = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind {
|
||||||
|
@ -1122,7 +1271,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
let r = Rvalue::Repeat(init, len);
|
let r = Rvalue::Repeat(init, len);
|
||||||
self.push_assignment(current, place, r, expr_id.into());
|
self.push_assignment(current, place, r, expr_id.into());
|
||||||
Ok(Some(current))
|
Ok(Some(current))
|
||||||
},
|
}
|
||||||
},
|
},
|
||||||
Expr::Literal(l) => {
|
Expr::Literal(l) => {
|
||||||
let ty = self.expr_ty_without_adjust(expr_id);
|
let ty = self.expr_ty_without_adjust(expr_id);
|
||||||
|
@ -1136,7 +1285,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
|
|
||||||
fn placeholder_subst(&mut self) -> Substitution {
|
fn placeholder_subst(&mut self) -> Substitution {
|
||||||
let placeholder_subst = match self.owner.as_generic_def_id() {
|
let placeholder_subst = match self.owner.as_generic_def_id() {
|
||||||
Some(x) => TyBuilder::placeholder_subst(self.db, x),
|
Some(it) => TyBuilder::placeholder_subst(self.db, it),
|
||||||
None => Substitution::empty(Interner),
|
None => Substitution::empty(Interner),
|
||||||
};
|
};
|
||||||
placeholder_subst
|
placeholder_subst
|
||||||
|
@ -1226,8 +1375,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
}
|
}
|
||||||
hir_def::hir::Literal::Char(c) => u32::from(*c).to_le_bytes().into(),
|
hir_def::hir::Literal::Char(c) => u32::from(*c).to_le_bytes().into(),
|
||||||
hir_def::hir::Literal::Bool(b) => vec![*b as u8],
|
hir_def::hir::Literal::Bool(b) => vec![*b as u8],
|
||||||
hir_def::hir::Literal::Int(x, _) => x.to_le_bytes()[0..size].into(),
|
hir_def::hir::Literal::Int(it, _) => it.to_le_bytes()[0..size].into(),
|
||||||
hir_def::hir::Literal::Uint(x, _) => x.to_le_bytes()[0..size].into(),
|
hir_def::hir::Literal::Uint(it, _) => it.to_le_bytes()[0..size].into(),
|
||||||
hir_def::hir::Literal::Float(f, _) => match size {
|
hir_def::hir::Literal::Float(f, _) => match size {
|
||||||
8 => f.into_f64().to_le_bytes().into(),
|
8 => f.into_f64().to_le_bytes().into(),
|
||||||
4 => f.into_f32().to_le_bytes().into(),
|
4 => f.into_f32().to_le_bytes().into(),
|
||||||
|
@ -1377,9 +1526,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
|
|
||||||
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
|
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
|
||||||
let mut ty = None;
|
let mut ty = None;
|
||||||
if let Some(x) = self.infer.expr_adjustments.get(&e) {
|
if let Some(it) = self.infer.expr_adjustments.get(&e) {
|
||||||
if let Some(x) = x.last() {
|
if let Some(it) = it.last() {
|
||||||
ty = Some(x.target.clone());
|
ty = Some(it.target.clone());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ty.unwrap_or_else(|| self.expr_ty_without_adjust(e))
|
ty.unwrap_or_else(|| self.expr_ty_without_adjust(e))
|
||||||
|
@ -1401,7 +1550,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
|
|
||||||
fn discr_temp_place(&mut self, current: BasicBlockId) -> Place {
|
fn discr_temp_place(&mut self, current: BasicBlockId) -> Place {
|
||||||
match &self.discr_temp {
|
match &self.discr_temp {
|
||||||
Some(x) => x.clone(),
|
Some(it) => it.clone(),
|
||||||
None => {
|
None => {
|
||||||
let tmp: Place = self
|
let tmp: Place = self
|
||||||
.temp(TyBuilder::discr_ty(), current, MirSpan::Unknown)
|
.temp(TyBuilder::discr_ty(), current, MirSpan::Unknown)
|
||||||
|
@ -1448,7 +1597,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn has_adjustments(&self, expr_id: ExprId) -> bool {
|
fn has_adjustments(&self, expr_id: ExprId) -> bool {
|
||||||
!self.infer.expr_adjustments.get(&expr_id).map(|x| x.is_empty()).unwrap_or(true)
|
!self.infer.expr_adjustments.get(&expr_id).map(|it| it.is_empty()).unwrap_or(true)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn merge_blocks(
|
fn merge_blocks(
|
||||||
|
@ -1478,7 +1627,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
))?
|
))?
|
||||||
.end
|
.end
|
||||||
{
|
{
|
||||||
Some(x) => x,
|
Some(it) => it,
|
||||||
None => {
|
None => {
|
||||||
let s = self.new_basic_block();
|
let s = self.new_basic_block();
|
||||||
self.current_loop_blocks
|
self.current_loop_blocks
|
||||||
|
@ -1602,10 +1751,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
pick_binding: impl Fn(BindingId) -> bool,
|
pick_binding: impl Fn(BindingId) -> bool,
|
||||||
) -> Result<BasicBlockId> {
|
) -> Result<BasicBlockId> {
|
||||||
let base_param_count = self.result.param_locals.len();
|
let base_param_count = self.result.param_locals.len();
|
||||||
self.result.param_locals.extend(params.clone().map(|(x, ty)| {
|
self.result.param_locals.extend(params.clone().map(|(it, ty)| {
|
||||||
let local_id = self.result.locals.alloc(Local { ty });
|
let local_id = self.result.locals.alloc(Local { ty });
|
||||||
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
|
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
|
||||||
if let Pat::Bind { id, subpat: None } = self.body[x] {
|
if let Pat::Bind { id, subpat: None } = self.body[it] {
|
||||||
if matches!(
|
if matches!(
|
||||||
self.body.bindings[id].mode,
|
self.body.bindings[id].mode,
|
||||||
BindingAnnotation::Unannotated | BindingAnnotation::Mutable
|
BindingAnnotation::Unannotated | BindingAnnotation::Mutable
|
||||||
|
@ -1646,7 +1795,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
|
|
||||||
fn binding_local(&self, b: BindingId) -> Result<LocalId> {
|
fn binding_local(&self, b: BindingId) -> Result<LocalId> {
|
||||||
match self.result.binding_locals.get(b) {
|
match self.result.binding_locals.get(b) {
|
||||||
Some(x) => Ok(*x),
|
Some(it) => Ok(*it),
|
||||||
None => {
|
None => {
|
||||||
// FIXME: It should never happens, but currently it will happen in `const_dependent_on_local` test, which
|
// FIXME: It should never happens, but currently it will happen in `const_dependent_on_local` test, which
|
||||||
// is a hir lowering problem IMO.
|
// is a hir lowering problem IMO.
|
||||||
|
@ -1731,6 +1880,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
||||||
|
|
||||||
fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
|
fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
|
||||||
Ok(match (source_ty.kind(Interner), target_ty.kind(Interner)) {
|
Ok(match (source_ty.kind(Interner), target_ty.kind(Interner)) {
|
||||||
|
(TyKind::FnDef(..), TyKind::Function(_)) => CastKind::Pointer(PointerCast::ReifyFnPointer),
|
||||||
(TyKind::Scalar(s), TyKind::Scalar(t)) => match (s, t) {
|
(TyKind::Scalar(s), TyKind::Scalar(t)) => match (s, t) {
|
||||||
(chalk_ir::Scalar::Float(_), chalk_ir::Scalar::Float(_)) => CastKind::FloatToFloat,
|
(chalk_ir::Scalar::Float(_), chalk_ir::Scalar::Float(_)) => CastKind::FloatToFloat,
|
||||||
(chalk_ir::Scalar::Float(_), _) => CastKind::FloatToInt,
|
(chalk_ir::Scalar::Float(_), _) => CastKind::FloatToInt,
|
||||||
|
@ -1742,17 +1892,17 @@ fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
|
||||||
(TyKind::Raw(_, a) | TyKind::Ref(_, _, a), TyKind::Raw(_, b) | TyKind::Ref(_, _, b)) => {
|
(TyKind::Raw(_, a) | TyKind::Ref(_, _, a), TyKind::Raw(_, b) | TyKind::Ref(_, _, b)) => {
|
||||||
CastKind::Pointer(if a == b {
|
CastKind::Pointer(if a == b {
|
||||||
PointerCast::MutToConstPointer
|
PointerCast::MutToConstPointer
|
||||||
} else if matches!(a.kind(Interner), TyKind::Slice(_) | TyKind::Str)
|
} else if matches!(b.kind(Interner), TyKind::Slice(_))
|
||||||
&& matches!(b.kind(Interner), TyKind::Slice(_) | TyKind::Str)
|
&& matches!(a.kind(Interner), TyKind::Array(_, _))
|
||||||
|
|| matches!(b.kind(Interner), TyKind::Dyn(_))
|
||||||
{
|
{
|
||||||
// slice to slice cast is no-op (metadata is not touched), so we use this
|
|
||||||
PointerCast::MutToConstPointer
|
|
||||||
} else if matches!(b.kind(Interner), TyKind::Slice(_) | TyKind::Dyn(_)) {
|
|
||||||
PointerCast::Unsize
|
PointerCast::Unsize
|
||||||
} else if matches!(a.kind(Interner), TyKind::Slice(s) if s == b) {
|
} else if matches!(a.kind(Interner), TyKind::Slice(s) if s == b) {
|
||||||
PointerCast::ArrayToPointer
|
PointerCast::ArrayToPointer
|
||||||
} else {
|
} else {
|
||||||
// cast between two sized pointer, like *const i32 to *const i8. There is no specific variant
|
// cast between two sized pointer, like *const i32 to *const i8, or two unsized pointer, like
|
||||||
|
// slice to slice, slice to str, ... . These are no-ops (even in the unsized case, no metadata
|
||||||
|
// will be touched) but there is no specific variant
|
||||||
// for it in `PointerCast` so we use `MutToConstPointer`
|
// for it in `PointerCast` so we use `MutToConstPointer`
|
||||||
PointerCast::MutToConstPointer
|
PointerCast::MutToConstPointer
|
||||||
})
|
})
|
||||||
|
@ -1796,7 +1946,7 @@ pub fn mir_body_for_closure_query(
|
||||||
implementation_error!("closure has not callable sig");
|
implementation_error!("closure has not callable sig");
|
||||||
};
|
};
|
||||||
let current = ctx.lower_params_and_bindings(
|
let current = ctx.lower_params_and_bindings(
|
||||||
args.iter().zip(sig.params().iter()).map(|(x, y)| (*x, y.clone())),
|
args.iter().zip(sig.params().iter()).map(|(it, y)| (*it, y.clone())),
|
||||||
|_| true,
|
|_| true,
|
||||||
)?;
|
)?;
|
||||||
if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
|
if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
|
||||||
|
@ -1815,34 +1965,35 @@ pub fn mir_body_for_closure_query(
|
||||||
FnTrait::FnMut | FnTrait::Fn => vec![ProjectionElem::Deref],
|
FnTrait::FnMut | FnTrait::Fn => vec![ProjectionElem::Deref],
|
||||||
};
|
};
|
||||||
ctx.result.walk_places(|p| {
|
ctx.result.walk_places(|p| {
|
||||||
if let Some(x) = upvar_map.get(&p.local) {
|
if let Some(it) = upvar_map.get(&p.local) {
|
||||||
let r = x.iter().find(|x| {
|
let r = it.iter().find(|it| {
|
||||||
if p.projection.len() < x.0.place.projections.len() {
|
if p.projection.len() < it.0.place.projections.len() {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
for (x, y) in p.projection.iter().zip(x.0.place.projections.iter()) {
|
for (it, y) in p.projection.iter().zip(it.0.place.projections.iter()) {
|
||||||
match (x, y) {
|
match (it, y) {
|
||||||
(ProjectionElem::Deref, ProjectionElem::Deref) => (),
|
(ProjectionElem::Deref, ProjectionElem::Deref) => (),
|
||||||
(ProjectionElem::Field(x), ProjectionElem::Field(y)) if x == y => (),
|
(ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (),
|
||||||
(
|
(
|
||||||
ProjectionElem::TupleOrClosureField(x),
|
ProjectionElem::TupleOrClosureField(it),
|
||||||
ProjectionElem::TupleOrClosureField(y),
|
ProjectionElem::TupleOrClosureField(y),
|
||||||
) if x == y => (),
|
) if it == y => (),
|
||||||
_ => return false,
|
_ => return false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
true
|
true
|
||||||
});
|
});
|
||||||
match r {
|
match r {
|
||||||
Some(x) => {
|
Some(it) => {
|
||||||
p.local = closure_local;
|
p.local = closure_local;
|
||||||
let mut next_projs = closure_projection.clone();
|
let mut next_projs = closure_projection.clone();
|
||||||
next_projs.push(PlaceElem::TupleOrClosureField(x.1));
|
next_projs.push(PlaceElem::TupleOrClosureField(it.1));
|
||||||
let prev_projs = mem::take(&mut p.projection);
|
let prev_projs = mem::take(&mut p.projection);
|
||||||
if x.0.kind != CaptureKind::ByValue {
|
if it.0.kind != CaptureKind::ByValue {
|
||||||
next_projs.push(ProjectionElem::Deref);
|
next_projs.push(ProjectionElem::Deref);
|
||||||
}
|
}
|
||||||
next_projs.extend(prev_projs.iter().cloned().skip(x.0.place.projections.len()));
|
next_projs
|
||||||
|
.extend(prev_projs.iter().cloned().skip(it.0.place.projections.len()));
|
||||||
p.projection = next_projs.into();
|
p.projection = next_projs.into();
|
||||||
}
|
}
|
||||||
None => err = Some(p.clone()),
|
None => err = Some(p.clone()),
|
||||||
|
@ -1902,8 +2053,8 @@ pub fn lower_to_mir(
|
||||||
// need to take this input explicitly.
|
// need to take this input explicitly.
|
||||||
root_expr: ExprId,
|
root_expr: ExprId,
|
||||||
) -> Result<MirBody> {
|
) -> Result<MirBody> {
|
||||||
if let Some((_, x)) = infer.type_mismatches().next() {
|
if let Some((_, it)) = infer.type_mismatches().next() {
|
||||||
return Err(MirLowerError::TypeMismatch(x.clone()));
|
return Err(MirLowerError::TypeMismatch(it.clone()));
|
||||||
}
|
}
|
||||||
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
|
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
|
||||||
// 0 is return local
|
// 0 is return local
|
||||||
|
@ -1929,7 +2080,7 @@ pub fn lower_to_mir(
|
||||||
body.params
|
body.params
|
||||||
.iter()
|
.iter()
|
||||||
.zip(callable_sig.params().iter())
|
.zip(callable_sig.params().iter())
|
||||||
.map(|(x, y)| (*x, y.clone())),
|
.map(|(it, y)| (*it, y.clone())),
|
||||||
binding_picker,
|
binding_picker,
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
|
|
|
@@ -1,12 +1,12 @@
 //! MIR lowering for places

 use super::*;
-use hir_def::{lang_item::lang_attr, FunctionId};
+use hir_def::FunctionId;
 use hir_expand::name;

 macro_rules! not_supported {
-($x: expr) => {
-return Err(MirLowerError::NotSupported(format!($x)))
+($it: expr) => {
+return Err(MirLowerError::NotSupported(format!($it)))
 };
 }

@@ -18,7 +18,9 @@ impl MirLowerCtx<'_> {
 ) -> Result<Option<(Place, BasicBlockId)>> {
 let ty = self.expr_ty_without_adjust(expr_id);
 let place = self.temp(ty, prev_block, expr_id.into())?;
-let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else {
+let Some(current) =
+self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)?
+else {
 return Ok(None);
 };
 Ok(Some((place.into(), current)))
@@ -32,10 +34,12 @@ impl MirLowerCtx<'_> {
 ) -> Result<Option<(Place, BasicBlockId)>> {
 let ty = adjustments
 .last()
-.map(|x| x.target.clone())
+.map(|it| it.target.clone())
 .unwrap_or_else(|| self.expr_ty_without_adjust(expr_id));
 let place = self.temp(ty, prev_block, expr_id.into())?;
-let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? else {
+let Some(current) =
+self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)?
+else {
 return Ok(None);
 };
 Ok(Some((place.into(), current)))
@@ -57,16 +61,17 @@ impl MirLowerCtx<'_> {
 if let Some((last, rest)) = adjustments.split_last() {
 match last.kind {
 Adjust::Deref(None) => {
-let Some(mut x) = self.lower_expr_as_place_with_adjust(
+let Some(mut it) = self.lower_expr_as_place_with_adjust(
 current,
 expr_id,
 upgrade_rvalue,
 rest,
-)? else {
+)?
+else {
 return Ok(None);
 };
-x.0 = x.0.project(ProjectionElem::Deref);
-Ok(Some(x))
+it.0 = it.0.project(ProjectionElem::Deref);
+Ok(Some(it))
 }
 Adjust::Deref(Some(od)) => {
 let Some((r, current)) = self.lower_expr_as_place_with_adjust(
@@ -74,14 +79,15 @@ impl MirLowerCtx<'_> {
 expr_id,
 upgrade_rvalue,
 rest,
-)? else {
+)?
+else {
 return Ok(None);
 };
 self.lower_overloaded_deref(
 current,
 r,
 rest.last()
-.map(|x| x.target.clone())
+.map(|it| it.target.clone())
 .unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)),
 last.target.clone(),
 expr_id.into(),
@@ -156,7 +162,7 @@ impl MirLowerCtx<'_> {
 let is_builtin = match self.expr_ty_without_adjust(*expr).kind(Interner) {
 TyKind::Ref(..) | TyKind::Raw(..) => true,
 TyKind::Adt(id, _) => {
-if let Some(lang_item) = lang_attr(self.db.upcast(), id.0) {
+if let Some(lang_item) = self.db.lang_attr(id.0.into()) {
 lang_item == LangItem::OwnedBox
 } else {
 false
@@ -165,7 +171,8 @@ impl MirLowerCtx<'_> {
 _ => false,
 };
 if !is_builtin {
-let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)?
+else {
 return Ok(None);
 };
 return self.lower_overloaded_deref(
@@ -192,7 +199,8 @@ impl MirLowerCtx<'_> {
 },
 );
 }
-let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)?
+else {
 return Ok(None);
 };
 r = r.project(ProjectionElem::Deref);
@@ -217,12 +225,18 @@ impl MirLowerCtx<'_> {
 )
 {
 let Some(index_fn) = self.infer.method_resolution(expr_id) else {
-return Err(MirLowerError::UnresolvedMethod("[overloaded index]".to_string()));
+return Err(MirLowerError::UnresolvedMethod(
+"[overloaded index]".to_string(),
+));
 };
-let Some((base_place, current)) = self.lower_expr_as_place(current, *base, true)? else {
+let Some((base_place, current)) =
+self.lower_expr_as_place(current, *base, true)?
+else {
 return Ok(None);
 };
-let Some((index_operand, current)) = self.lower_expr_to_some_operand(*index, current)? else {
+let Some((index_operand, current)) =
+self.lower_expr_to_some_operand(*index, current)?
+else {
 return Ok(None);
 };
 return self.lower_overloaded_index(
@@ -239,8 +253,8 @@ impl MirLowerCtx<'_> {
 .infer
 .expr_adjustments
 .get(base)
-.and_then(|x| x.split_last())
-.map(|x| x.1)
+.and_then(|it| it.split_last())
+.map(|it| it.1)
 .unwrap_or(&[]);
 let Some((mut p_base, current)) =
 self.lower_expr_as_place_with_adjust(current, *base, true, adjusts)?
@@ -249,7 +263,8 @@ impl MirLowerCtx<'_> {
 };
 let l_index =
 self.temp(self.expr_ty_after_adjustments(*index), current, expr_id.into())?;
-let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)? else {
+let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)?
+else {
 return Ok(None);
 };
 p_base = p_base.project(ProjectionElem::Index(l_index));
@@ -282,7 +297,15 @@ impl MirLowerCtx<'_> {
 )
 .intern(Interner),
 );
-let Some(current) = self.lower_call(index_fn_op, Box::new([Operand::Copy(place), index_operand]), result.clone(), current, false, span)? else {
+let Some(current) = self.lower_call(
+index_fn_op,
+Box::new([Operand::Copy(place), index_operand]),
+result.clone(),
+current,
+false,
+span,
+)?
+else {
 return Ok(None);
 };
 result = result.project(ProjectionElem::Deref);
@@ -329,7 +352,15 @@ impl MirLowerCtx<'_> {
 .intern(Interner),
 );
 let mut result: Place = self.temp(target_ty_ref, current, span)?.into();
-let Some(current) = self.lower_call(deref_fn_op, Box::new([Operand::Copy(ref_place)]), result.clone(), current, false, span)? else {
+let Some(current) = self.lower_call(
+deref_fn_op,
+Box::new([Operand::Copy(ref_place)]),
+result.clone(),
+current,
+false,
+span,
+)?
+else {
 return Ok(None);
 };
 result = result.project(ProjectionElem::Deref);

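Context for the hunks above: much of the churn is rustfmt's layout for the `let ... else` construct, which moves a long scrutinee and the `else` keyword onto their own lines. A minimal self-contained sketch of the construct (hypothetical function, not part of this diff); note that the `else` arm must diverge:

    fn first_char(s: &str) -> Option<char> {
        // `let ... else` binds on success; the else block must diverge
        // (return, break, continue, or panic).
        let Some(c) = s.chars().next() else {
            return None;
        };
        Some(c)
    }
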
@@ -307,6 +307,11 @@ impl MirLowerCtx<'_> {
 mode,
 )?,
 None => {
+// The path is not a variant, so it is a const
+if mode != MatchingMode::Check {
+// A const don't bind anything. Only needs check.
+return Ok((current, current_else));
+}
 let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
 let resolver = self.owner.resolver(self.db.upcast());
 let pr = resolver
@@ -362,8 +367,8 @@ impl MirLowerCtx<'_> {
 },
 Pat::Lit(l) => match &self.body.exprs[*l] {
 Expr::Literal(l) => {
-let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
 if mode == MatchingMode::Check {
+let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
 self.pattern_match_const(current_else, current, c, cond_place, pattern)?
 } else {
 (current, current_else)

@@ -13,15 +13,14 @@ use chalk_ir::{
 fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
 ConstData, DebruijnIndex,
 };
-use hir_def::{DefWithBodyId, GeneralConstId};
+use hir_def::DefWithBodyId;
 use triomphe::Arc;

 use crate::{
-consteval::unknown_const,
+consteval::{intern_const_scalar, unknown_const},
 db::HirDatabase,
 from_placeholder_idx,
 infer::normalize,
-method_resolution::lookup_impl_const,
 utils::{generics, Generics},
 ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
 };
@@ -29,8 +28,8 @@ use crate::{
 use super::{MirBody, MirLowerError, Operand, Rvalue, StatementKind, TerminatorKind};

 macro_rules! not_supported {
-($x: expr) => {
-return Err(MirLowerError::NotSupported(format!($x)))
+($it: expr) => {
+return Err(MirLowerError::NotSupported(format!($it)))
 };
 }

@@ -97,16 +96,16 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
 idx: chalk_ir::PlaceholderIndex,
 _outer_binder: DebruijnIndex,
 ) -> std::result::Result<chalk_ir::Const<Interner>, Self::Error> {
-let x = from_placeholder_idx(self.db, idx);
-let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
+let it = from_placeholder_idx(self.db, idx);
+let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
 not_supported!("missing idx in generics");
 };
 Ok(self
 .subst
 .as_slice(Interner)
 .get(idx)
-.and_then(|x| x.constant(Interner))
-.ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
+.and_then(|it| it.constant(Interner))
+.ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))?
 .clone())
 }

@@ -115,16 +114,16 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
 idx: chalk_ir::PlaceholderIndex,
 _outer_binder: DebruijnIndex,
 ) -> std::result::Result<Ty, Self::Error> {
-let x = from_placeholder_idx(self.db, idx);
-let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
+let it = from_placeholder_idx(self.db, idx);
+let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
 not_supported!("missing idx in generics");
 };
 Ok(self
 .subst
 .as_slice(Interner)
 .get(idx)
-.and_then(|x| x.ty(Interner))
-.ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
+.and_then(|it| it.ty(Interner))
+.ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))?
 .clone())
 }

@@ -180,7 +179,7 @@ impl Filler<'_> {
 MirLowerError::GenericArgNotProvided(
 self.generics
 .as_ref()
-.and_then(|x| x.iter().nth(b.index))
+.and_then(|it| it.iter().nth(b.index))
 .unwrap()
 .0,
 self.subst.clone(),
@@ -193,25 +192,12 @@ impl Filler<'_> {
 | chalk_ir::ConstValue::Placeholder(_) => {}
 chalk_ir::ConstValue::Concrete(cc) => match &cc.interned {
 crate::ConstScalar::UnevaluatedConst(const_id, subst) => {
-let mut const_id = *const_id;
 let mut subst = subst.clone();
 self.fill_subst(&mut subst)?;
-if let GeneralConstId::ConstId(c) = const_id {
-let (c, s) = lookup_impl_const(
-self.db,
-self.db.trait_environment_for_body(self.owner),
-c,
-subst,
+*c = intern_const_scalar(
+crate::ConstScalar::UnevaluatedConst(*const_id, subst),
+c.data(Interner).ty.clone(),
 );
-const_id = GeneralConstId::ConstId(c);
-subst = s;
-}
-let result =
-self.db.const_eval(const_id.into(), subst).map_err(|e| {
-let name = const_id.name(self.db.upcast());
-MirLowerError::ConstEvalError(name, Box::new(e))
-})?;
-*c = result;
 }
 crate::ConstScalar::Bytes(_, _) | crate::ConstScalar::Unknown => (),
 },

@@ -135,7 +135,7 @@ impl<'a> MirPrettyCtx<'a> {

 fn for_closure(&mut self, closure: ClosureId) {
 let body = match self.db.mir_body_for_closure(closure) {
-Ok(x) => x,
+Ok(it) => it,
 Err(e) => {
 wln!(self, "// error in {closure:?}: {e:?}");
 return;
@@ -145,7 +145,7 @@ impl<'a> MirPrettyCtx<'a> {
 let indent = mem::take(&mut self.indent);
 let mut ctx = MirPrettyCtx {
 body: &body,
-local_to_binding: body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(),
+local_to_binding: body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(),
 result,
 indent,
 ..*self
@@ -167,7 +167,7 @@ impl<'a> MirPrettyCtx<'a> {
 }

 fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self {
-let local_to_binding = body.binding_locals.iter().map(|(x, y)| (*y, x)).collect();
+let local_to_binding = body.binding_locals.iter().map(|(it, y)| (*y, it)).collect();
 MirPrettyCtx {
 body,
 db,
@@ -315,17 +315,17 @@ impl<'a> MirPrettyCtx<'a> {
 }
 }
 }
-ProjectionElem::TupleOrClosureField(x) => {
+ProjectionElem::TupleOrClosureField(it) => {
 f(this, local, head);
-w!(this, ".{}", x);
+w!(this, ".{}", it);
 }
 ProjectionElem::Index(l) => {
 f(this, local, head);
 w!(this, "[{}]", this.local_name(*l).display(this.db));
 }
-x => {
+it => {
 f(this, local, head);
-w!(this, ".{:?}", x);
+w!(this, ".{:?}", it);
 }
 }
 }
@@ -356,14 +356,14 @@ impl<'a> MirPrettyCtx<'a> {
 }
 self.place(p);
 }
-Rvalue::Aggregate(AggregateKind::Tuple(_), x) => {
+Rvalue::Aggregate(AggregateKind::Tuple(_), it) => {
 w!(self, "(");
-self.operand_list(x);
+self.operand_list(it);
 w!(self, ")");
 }
-Rvalue::Aggregate(AggregateKind::Array(_), x) => {
+Rvalue::Aggregate(AggregateKind::Array(_), it) => {
 w!(self, "[");
-self.operand_list(x);
+self.operand_list(it);
 w!(self, "]");
 }
 Rvalue::Repeat(op, len) => {
@@ -371,19 +371,19 @@ impl<'a> MirPrettyCtx<'a> {
 self.operand(op);
 w!(self, "; {}]", len.display(self.db));
 }
-Rvalue::Aggregate(AggregateKind::Adt(_, _), x) => {
+Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => {
 w!(self, "Adt(");
-self.operand_list(x);
+self.operand_list(it);
 w!(self, ")");
 }
-Rvalue::Aggregate(AggregateKind::Closure(_), x) => {
+Rvalue::Aggregate(AggregateKind::Closure(_), it) => {
 w!(self, "Closure(");
-self.operand_list(x);
+self.operand_list(it);
 w!(self, ")");
 }
-Rvalue::Aggregate(AggregateKind::Union(_, _), x) => {
+Rvalue::Aggregate(AggregateKind::Union(_, _), it) => {
 w!(self, "Union(");
-self.operand_list(x);
+self.operand_list(it);
 w!(self, ")");
 }
 Rvalue::Len(p) => {
@@ -428,8 +428,8 @@ impl<'a> MirPrettyCtx<'a> {
 }
 }

-fn operand_list(&mut self, x: &[Operand]) {
-let mut it = x.iter();
+fn operand_list(&mut self, it: &[Operand]) {
+let mut it = it.iter();
 if let Some(first) = it.next() {
 self.operand(first);
 for op in it {

@@ -30,7 +30,7 @@ use syntax::{
 ast::{self, AstNode, HasName},
 SyntaxNode,
 };
-use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
+use tracing_subscriber::{layer::SubscriberExt, Registry};
 use tracing_tree::HierarchicalLayer;
 use triomphe::Arc;

@@ -52,7 +52,8 @@ fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
 return None;
 }

-let filter = EnvFilter::from_env("CHALK_DEBUG");
+let filter: tracing_subscriber::filter::Targets =
+env::var("CHALK_DEBUG").ok().and_then(|it| it.parse().ok()).unwrap_or_default();
 let layer = HierarchicalLayer::default()
 .with_indent_lines(true)
 .with_ansi(false)
@@ -205,7 +206,9 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
 let Some(node) = (match expr_or_pat {
 hir_def::hir::ExprOrPatId::ExprId(expr) => expr_node(&body_source_map, expr, &db),
 hir_def::hir::ExprOrPatId::PatId(pat) => pat_node(&body_source_map, pat, &db),
-}) else { continue; };
+}) else {
+continue;
+};
 let range = node.as_ref().original_file_range(&db);
 let actual = format!(
 "expected {}, got {}",

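Context for the tracing change above: `tracing_subscriber::filter::Targets` implements `FromStr` with the usual `target=level` filter syntax, so the filter can be parsed straight from an environment variable instead of pulling in `EnvFilter`. A minimal sketch under that assumption (the variable name here is hypothetical):

    use std::env;
    use tracing_subscriber::filter::Targets;

    fn parse_filter() -> Targets {
        // An unset or malformed variable falls back to Targets::default(),
        // which enables no targets at all.
        env::var("MY_DEBUG").ok().and_then(|it| it.parse().ok()).unwrap_or_default()
    }
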
@@ -202,11 +202,11 @@ fn expr_macro_def_expanded_in_various_places() {
 100..119 'for _ ...!() {}': IntoIterator::IntoIter<isize>
 100..119 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize>
 100..119 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item>
-100..119 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>>
+100..119 'for _ ...!() {}': Option<IntoIterator::Item<isize>>
 100..119 'for _ ...!() {}': ()
 100..119 'for _ ...!() {}': ()
 100..119 'for _ ...!() {}': ()
-104..105 '_': Iterator::Item<IntoIterator::IntoIter<isize>>
+104..105 '_': IntoIterator::Item<isize>
 117..119 '{}': ()
 124..134 '|| spam!()': impl Fn() -> isize
 140..156 'while ...!() {}': ()
@@ -293,11 +293,11 @@ fn expr_macro_rules_expanded_in_various_places() {
 114..133 'for _ ...!() {}': IntoIterator::IntoIter<isize>
 114..133 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize>
 114..133 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item>
-114..133 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>>
+114..133 'for _ ...!() {}': Option<IntoIterator::Item<isize>>
 114..133 'for _ ...!() {}': ()
 114..133 'for _ ...!() {}': ()
 114..133 'for _ ...!() {}': ()
-118..119 '_': Iterator::Item<IntoIterator::IntoIter<isize>>
+118..119 '_': IntoIterator::Item<isize>
 131..133 '{}': ()
 138..148 '|| spam!()': impl Fn() -> isize
 154..170 'while ...!() {}': ()

@@ -1215,6 +1215,52 @@ fn main() {
 );
 }

+#[test]
+fn inherent_method_deref_raw() {
+check_types(
+r#"
+struct Val;
+
+impl Val {
+pub fn method(self: *const Val) -> u32 {
+0
+}
+}
+
+fn main() {
+let foo: *const Val;
+foo.method();
+// ^^^^^^^^^^^^ u32
+}
+"#,
+);
+}
+
+#[test]
+fn trait_method_deref_raw() {
+check_types(
+r#"
+trait Trait {
+fn method(self: *const Self) -> u32;
+}
+
+struct Val;
+
+impl Trait for Val {
+fn method(self: *const Self) -> u32 {
+0
+}
+}
+
+fn main() {
+let foo: *const Val;
+foo.method();
+// ^^^^^^^^^^^^ u32
+}
+"#,
+);
+}
+
 #[test]
 fn method_on_dyn_impl() {
 check_types(

@@ -1240,11 +1240,11 @@ fn test() {
 16..66 'for _ ... }': IntoIterator::IntoIter<()>
 16..66 'for _ ... }': &mut IntoIterator::IntoIter<()>
 16..66 'for _ ... }': fn next<IntoIterator::IntoIter<()>>(&mut IntoIterator::IntoIter<()>) -> Option<<IntoIterator::IntoIter<()> as Iterator>::Item>
-16..66 'for _ ... }': Option<Iterator::Item<IntoIterator::IntoIter<()>>>
+16..66 'for _ ... }': Option<IntoIterator::Item<()>>
 16..66 'for _ ... }': ()
 16..66 'for _ ... }': ()
 16..66 'for _ ... }': ()
-20..21 '_': Iterator::Item<IntoIterator::IntoIter<()>>
+20..21 '_': IntoIterator::Item<()>
 25..39 '{ let x = 0; }': ()
 31..32 'x': i32
 35..36 '0': i32

@@ -4148,6 +4148,30 @@ where
 );
 }

+#[test]
+fn gats_in_bounds_for_assoc() {
+check_types(
+r#"
+trait Trait {
+type Assoc: Another<Gat<i32> = usize>;
+type Assoc2<T>: Another<Gat<T> = T>;
+}
+trait Another {
+type Gat<T>;
+fn foo(&self) -> Self::Gat<i32>;
+fn bar<T>(&self) -> Self::Gat<T>;
+}
+
+fn test<T: Trait>(a: T::Assoc, b: T::Assoc2<isize>) {
+let v = a.foo();
+//^ usize
+let v = b.bar::<isize>();
+//^ isize
+}
+"#,
+);
+}
+
 #[test]
 fn bin_op_with_scalar_fallback() {
 // Extra impls are significant so that chalk doesn't give us definite guidances.

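Context for the new test above: it exercises generic associated types (GATs) appearing in bounds with equality constraints, stable since Rust 1.65. A minimal compilable sketch of the same shape, reusing the trait names from the fixture (the free function is illustrative):

    trait Another {
        type Gat<T>;
        fn foo(&self) -> Self::Gat<i32>;
    }

    trait Trait {
        // The bound pins `<Self::Assoc as Another>::Gat<i32>` to usize.
        type Assoc: Another<Gat<i32> = usize>;
    }

    fn use_it<T: Trait>(a: T::Assoc) -> usize {
        a.foo() // resolves to usize through the GAT equality bound
    }
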
@@ -170,7 +170,7 @@ fn solve(

 struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, ChalkContext<'a>>);

-impl<'a> Drop for LoggingRustIrDatabaseLoggingOnDrop<'a> {
+impl Drop for LoggingRustIrDatabaseLoggingOnDrop<'_> {
 fn drop(&mut self) {
 eprintln!("chalk program:\n{}", self.0);
 }

@@ -89,7 +89,7 @@ struct SuperTraits<'a> {
 seen: FxHashSet<ChalkTraitId>,
 }

-impl<'a> SuperTraits<'a> {
+impl SuperTraits<'_> {
 fn elaborate(&mut self, trait_ref: &TraitRef) {
 direct_super_trait_refs(self.db, trait_ref, |trait_ref| {
 if !self.seen.contains(&trait_ref.trait_id) {
@@ -99,7 +99,7 @@ impl<'a> SuperTraits<'a> {
 }
 }

-impl<'a> Iterator for SuperTraits<'a> {
+impl Iterator for SuperTraits<'_> {
 type Item = TraitRef;

 fn next(&mut self) -> Option<Self::Item> {

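Context for the impl rewrites above and in the previous file: when an impl never needs to name its lifetime parameter, the parameter can be elided with the anonymous lifetime `'_`, which is the form these hunks switch to. A minimal sketch of the pattern (hypothetical type):

    struct Wrapper<'a>(&'a str);

    // Equivalent to `impl<'a> Iterator for Wrapper<'a>`; the unused
    // lifetime parameter is elided with '_.
    impl Iterator for Wrapper<'_> {
        type Item = char;

        fn next(&mut self) -> Option<Self::Item> {
            let mut chars = self.0.chars();
            let c = chars.next()?;
            self.0 = chars.as_str();
            Some(c)
        }
    }
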
@@ -18,7 +18,7 @@ arrayvec = "0.7.2"
 itertools = "0.10.5"
 smallvec.workspace = true
 triomphe.workspace = true
-once_cell = "1.17.0"
+once_cell = "1.17.1"

 # local deps
 base-db.workspace = true

Some files were not shown because too many files have changed in this diff