Rollup merge of #120636 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? ghost
Commit 1fbd46a5e8 by Matthias Krüger, 2024-02-05 06:37:17 +01:00 (committed via GitHub)
197 changed files with 3106 additions and 2007 deletions

View file

@@ -90,7 +90,7 @@ jobs:
       - name: Switch to stable toolchain
         run: |
           rustup update --no-self-update stable
-          rustup component add --toolchain stable rust-src
+          rustup component add --toolchain stable rust-src clippy
           rustup default stable
       - name: Run analysis-stats on rust-analyzer
@@ -103,6 +103,10 @@ jobs:
           RUSTC_BOOTSTRAP: 1
         run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std
+      - name: clippy
+        if: matrix.os == 'ubuntu-latest'
+        run: cargo clippy --all-targets
   # Weird targets to catch non-portable code
   rust-cross:
     if: github.repository == 'rust-lang/rust-analyzer'
@@ -203,11 +207,25 @@ jobs:
         working-directory: ./editors/code
         if: needs.changes.outputs.typescript == 'true'

+  typo-check:
+    name: Typo Check
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+    env:
+      FORCE_COLOR: 1
+      TYPOS_VERSION: v1.18.0
+    steps:
+      - name: download typos
+        run: curl -LsSf https://github.com/crate-ci/typos/releases/download/$TYPOS_VERSION/typos-$TYPOS_VERSION-x86_64-unknown-linux-musl.tar.gz | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin
+      - name: check for typos
+        run: typos
+
   end-success:
     name: bors build finished
     if: github.event.pusher.name == 'bors' && success()
     runs-on: ubuntu-latest
-    needs: [rust, rust-cross, typescript]
+    needs: [rust, rust-cross, typescript, typo-check]
     steps:
       - name: Mark the job as successful
         run: exit 0
@@ -216,7 +234,7 @@ jobs:
     name: bors build finished
     if: github.event.pusher.name == 'bors' && !success()
     runs-on: ubuntu-latest
-    needs: [rust, rust-cross, typescript]
+    needs: [rust, rust-cross, typescript, typo-check]
     steps:
       - name: Mark the job as a failure
         run: exit 1

View file

@@ -43,10 +43,10 @@ jobs:
         - os: ubuntu-20.04
           target: arm-unknown-linux-gnueabihf
           code-target: linux-armhf
-        - os: macos-11
+        - os: macos-12
           target: x86_64-apple-darwin
           code-target: darwin-x64
-        - os: macos-11
+        - os: macos-12
           target: aarch64-apple-darwin
           code-target: darwin-arm64

.typos.toml (new file, 31 lines)
View file

@@ -0,0 +1,31 @@
+[default.extend-identifiers]
+AnserStyle = "AnserStyle"
+datas = "datas"
+impl_froms = "impl_froms"
+selfs = "selfs"
+
+[default.extend-words]
+anser = "anser"
+ba = "ba"
+fo = "fo"
+ket = "ket"
+makro = "makro"
+raison = "raison"
+trivias = "trivias"
+TOOD = "TOOD"
+
+[default]
+extend-ignore-re = [
+    # ignore string which contains $x (x is a num), which use widely in test
+    ".*\\$\\d.*",
+    # ignore generated content like `boxed....nner()`, `Defaul...efault`
+    "\\w*\\.{3,4}\\w*",
+]
+
+[files]
+extend-exclude = [
+    "*.json",
+    "*.rast",
+    "crates/parser/test_data/lexer/err/*",
+    "bench_data/*",
+]

Cargo.lock (generated, 77 changed lines)
View file

@@ -19,11 +19,11 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"

[[package]]
name = "always-assert"
-version = "0.1.3"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4436e0292ab1bb631b42973c61205e704475fe8126af845c8d923c0996328127"
+checksum = "a1078fa1ce1e34b1872d8611ad921196d76bdd7027e949fbe31231abde201892"
dependencies = [
- "log",
+ "tracing",
]

[[package]]
@@ -78,6 +78,7 @@ dependencies = [
 "span",
 "stdx",
 "syntax",
+ "tracing",
 "triomphe",
 "vfs",
]
@@ -494,8 +495,10 @@ dependencies = [
 "profile",
 "rustc-hash",
 "smallvec",
+ "span",
 "stdx",
 "syntax",
+ "tracing",
 "triomphe",
 "tt",
]
@@ -592,7 +595,7 @@ dependencies = [
 "profile",
 "project-model",
 "ra-ap-rustc_abi",
- "ra-ap-rustc_index",
+ "ra-ap-rustc_index 0.35.0",
 "ra-ap-rustc_pattern_analysis",
 "rustc-hash",
 "scoped-tls",
@@ -670,6 +673,7 @@ dependencies = [
 "test-fixture",
 "test-utils",
 "text-edit",
+ "tracing",
]

[[package]]
@@ -690,6 +694,7 @@ dependencies = [
 "test-fixture",
 "test-utils",
 "text-edit",
+ "tracing",
]

[[package]]
@@ -747,6 +752,7 @@ dependencies = [
 "test-fixture",
 "test-utils",
 "text-edit",
+ "tracing",
]

[[package]]
@@ -1342,6 +1348,7 @@ dependencies = [
 "once_cell",
 "perf-event",
 "tikv-jemalloc-ctl",
+ "tracing",
 "winapi",
]
@@ -1419,12 +1426,12 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_abi"
-version = "0.33.0"
+version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ce9100fc66c6c60aeeb076868ead9c2eaa65d6a5a90404f08c242327a92ff4b"
+checksum = "3c0baa423a2c2bfd6e4bd40e7215f7ddebd12a649ce0b65078a38b91068895aa"
dependencies = [
 "bitflags 2.4.1",
- "ra-ap-rustc_index",
+ "ra-ap-rustc_index 0.35.0",
 "tracing",
]
@@ -1435,7 +1442,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e5313d7f243b63ef9e58d94355b11aa8499f1328055f1f58adf0a5ea7d2faca"
dependencies = [
 "arrayvec",
- "ra-ap-rustc_index_macros",
+ "ra-ap-rustc_index_macros 0.33.0",
+ "smallvec",
+]
+
+[[package]]
+name = "ra-ap-rustc_index"
+version = "0.35.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "322b751895cc4a0a2ee0c6ab36ec80bc8abf5f8d76254c482f96f03c27c92ebe"
+dependencies = [
+ "arrayvec",
+ "ra-ap-rustc_index_macros 0.35.0",
 "smallvec",
]
@@ -1452,10 +1470,22 @@ dependencies = [
]

[[package]]
-name = "ra-ap-rustc_lexer"
-version = "0.33.0"
+name = "ra-ap-rustc_index_macros"
+version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2d221356e5717595e8a0afa5fba1620dcb4032ab784dc4d98fdc7284e3feb66"
+checksum = "054e25eac52f0506c1309ca4317c11ad4925d7b99eb897f71aa7c3cbafb46c2b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.39",
+ "synstructure",
+]
+
+[[package]]
+name = "ra-ap-rustc_lexer"
+version = "0.35.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c8da0fa51a1a97ba4296a1c78fa454815a153b472e2546b6338a0902ad59e015"
dependencies = [
 "unicode-properties",
 "unicode-xid",
@@ -1463,11 +1493,11 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_parse_format"
-version = "0.33.0"
+version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab62fc925612374103b4f178da347b535b35d9eb1ff5ba42105c990b2e25a164"
+checksum = "3851f930a54adcb76889983dcd5c00a0c4e206e190e1384dbc00d49b82dfb45e"
dependencies = [
- "ra-ap-rustc_index",
+ "ra-ap-rustc_index 0.35.0",
 "ra-ap-rustc_lexer",
]
@@ -1478,7 +1508,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c4085e0c771fd4b883930b599ef42966b855762bbe4052c17673b3253421a6d"
dependencies = [
 "derivative",
- "ra-ap-rustc_index",
+ "ra-ap-rustc_index 0.33.0",
 "rustc-hash",
 "rustc_apfloat",
 "smallvec",
@@ -1581,7 +1611,6 @@ dependencies = [
 "tikv-jemallocator",
 "toolchain",
 "tracing",
- "tracing-log",
 "tracing-subscriber",
 "tracing-tree",
 "triomphe",
@@ -1595,26 +1624,26 @@ dependencies = [

[[package]]
name = "rust-analyzer-salsa"
-version = "0.17.0-pre.5"
+version = "0.17.0-pre.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca9d387a9801f4fb9b366789ad1bfc08448cafc49cf148d907cfcd88ab665d7f"
+checksum = "719825638c59fd26a55412a24561c7c5bcf54364c88b9a7a04ba08a6eafaba8d"
dependencies = [
 "indexmap",
 "lock_api",
- "log",
 "oorandom",
 "parking_lot",
 "rust-analyzer-salsa-macros",
 "rustc-hash",
 "smallvec",
+ "tracing",
 "triomphe",
]

[[package]]
name = "rust-analyzer-salsa-macros"
-version = "0.17.0-pre.5"
+version = "0.17.0-pre.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2035f385d7fae31e9b086f40b272ee1d79c484472f31c9a10348a406e841eaf"
+checksum = "4d96498e9684848c6676c399032ebc37c52da95ecbefa83d71ccc53b9f8a4a8e"
dependencies = [
 "heck",
 "proc-macro2",
@@ -1661,9 +1690,9 @@ dependencies = [

[[package]]
name = "scip"
-version = "0.3.1"
+version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e84d21062a3ba08d58870c8c36b0c005b2b2261c6ad1bf7042585427c781883"
+checksum = "e5dc1bd66649133af84ab62436ddd2856c2605182b02dec2cd197f684dfe15ef"
dependencies = [
 "protobuf",
]
@@ -1863,6 +1892,7 @@ dependencies = [
 "stdx",
 "test-utils",
 "text-edit",
+ "tracing",
 "triomphe",
 "ungrammar",
]
@@ -1890,6 +1920,7 @@ dependencies = [
 "rustc-hash",
 "stdx",
 "text-size",
+ "tracing",
]

[[package]]

View file

@@ -79,10 +79,10 @@ tt = { path = "./crates/tt", version = "0.0.0" }
vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }

-ra-ap-rustc_lexer = { version = "0.33.0", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.33.0", default-features = false }
-ra-ap-rustc_index = { version = "0.33.0", default-features = false }
-ra-ap-rustc_abi = { version = "0.33.0", default-features = false }
+ra-ap-rustc_lexer = { version = "0.35.0", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.35.0", default-features = false }
+ra-ap-rustc_index = { version = "0.35.0", default-features = false }
+ra-ap-rustc_abi = { version = "0.35.0", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.33.0", default-features = false }

# local crates that aren't published to crates.io. These should not have versions.
@@ -113,7 +113,7 @@ itertools = "0.12.0"
libc = "0.2.150"
nohash-hasher = "0.2.0"
rayon = "1.8.0"
-rust-analyzer-salsa = "0.17.0-pre.5"
+rust-analyzer-salsa = "0.17.0-pre.6"
rustc-hash = "1.1.0"
semver = "1.0.14"
serde = { version = "1.0.192", features = ["derive"] }
@@ -128,9 +128,9 @@ text-size = "1.1.1"
tracing = "0.1.40"
tracing-tree = "0.3.0"
tracing-subscriber = { version = "0.3.18", default-features = false, features = [
    "registry",
    "fmt",
    "tracing-log",
] }
triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
xshell = "0.2.5"
@@ -167,29 +167,14 @@ new_ret_no_self = "allow"

## Following lints should be tackled at some point
borrowed_box = "allow"
-borrow_deref_ref = "allow"
-derivable_impls = "allow"
derived_hash_with_manual_eq = "allow"
-field_reassign_with_default = "allow"
forget_non_drop = "allow"
-format_collect = "allow"
-large_enum_variant = "allow"
needless_doctest_main = "allow"
-new_without_default = "allow"
non_canonical_clone_impl = "allow"
non_canonical_partial_ord_impl = "allow"
self_named_constructors = "allow"
-skip_while_next = "allow"
too_many_arguments = "allow"
-toplevel_ref_arg = "allow"
type_complexity = "allow"
-unnecessary_cast = "allow"
-unnecessary_filter_map = "allow"
-unnecessary_lazy_evaluations = "allow"
-unnecessary_mut_passed = "allow"
-useless_conversion = "allow"
-useless_format = "allow"
-wildcard_in_or_patterns = "allow"
wrong_self_convention = "allow"

## warn at following lints

View file

@@ -17,6 +17,7 @@ rust-analyzer-salsa.workspace = true
rustc-hash.workspace = true
triomphe.workspace = true
semver.workspace = true
+tracing.workspace = true

# local deps
cfg.workspace = true

View file

@@ -51,7 +51,7 @@ impl FileChange {
    }

    pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
-        let _p = profile::span("RootDatabase::apply_change");
+        let _p = tracing::span!(tracing::Level::INFO, "RootDatabase::apply_change").entered();
        if let Some(roots) = self.roots {
            for (idx, root) in roots.into_iter().enumerate() {
                let root_id = SourceRootId(idx as u32);
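A recurring change across this update replaces rust-analyzer's homegrown `profile::span` markers with `tracing` spans. A minimal sketch of the pattern (function name and field are made up for illustration): `tracing::span!` builds a span, and `.entered()` converts it into an RAII guard that exits the span when dropped, so binding it to `_p` covers the rest of the scope.

```rust
use tracing::{span, Level};

fn apply_change(file_id: u32) {
    // Enter an INFO-level span for this scope; `entered()` returns an
    // `EnteredSpan` guard that exits the span when `_p` is dropped.
    let _p = span!(Level::INFO, "apply_change").entered();

    // Fields replace the old `.detail(|| ...)` closures: `?x` records
    // `x` via its `Debug` impl, `%x` via `Display`, matching the
    // `?file_id` and `%import.path` usages in the hunks below.
    let _q = span!(Level::INFO, "parse_query", ?file_id).entered();
}

fn main() {
    // Without a subscriber installed the spans are cheap no-ops, which
    // is why the pattern can stay in hot code paths.
    apply_change(42);
}
```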

View file

@@ -494,7 +494,7 @@ impl CrateGraph {
        from: CrateId,
        dep: Dependency,
    ) -> Result<(), CyclicDependenciesError> {
-        let _p = profile::span("add_dep");
+        let _p = tracing::span!(tracing::Level::INFO, "add_dep").entered();

        self.check_cycle_after_dependency(from, dep.crate_id)?;

View file

@@ -65,7 +65,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
}

fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
-    let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
+    let _p = tracing::span!(tracing::Level::INFO, "parse_query", ?file_id).entered();
    let text = db.file_text(file_id);
    SourceFile::parse(&text)
}
@@ -116,7 +116,7 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
    }

    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
-        let _p = profile::span("relevant_crates");
+        let _p = tracing::span!(tracing::Level::INFO, "relevant_crates").entered();
        let source_root = self.0.file_source_root(file_id);
        self.0.source_root_crates(source_root)
    }

View file

@@ -493,7 +493,9 @@ impl CargoActor {
                    // Skip certain kinds of messages to only spend time on what's useful
                    JsonMessage::Cargo(message) => match message {
                        cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
-                            self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
+                            self.sender
+                                .send(CargoMessage::CompilerArtifact(Box::new(artifact)))
+                                .unwrap();
                        }
                        cargo_metadata::Message::CompilerMessage(msg) => {
                            self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
@@ -538,7 +540,7 @@ impl CargoActor {
}

enum CargoMessage {
-    CompilerArtifact(cargo_metadata::Artifact),
+    CompilerArtifact(Box<cargo_metadata::Artifact>),
    Diagnostic(Diagnostic),
}
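Boxing the artifact here addresses clippy's `large_enum_variant` lint, which this update also drops from the allow-list in the workspace `Cargo.toml`. A Rust enum is as large as its largest variant, so one bulky variant inflates every value of the type, including the small diagnostics flowing through the same channel. A minimal sketch with made-up stand-in types:

```rust
use std::mem::size_of;

// Hypothetical stand-ins: `Big` plays the role of `cargo_metadata::Artifact`.
struct Big([u8; 1024]);
struct Small(u32);

enum Unboxed {
    Large(Big), // forces every `Unboxed` value to reserve ~1 KiB
    Tiny(Small),
}

enum Boxed {
    Large(Box<Big>), // the variant now holds only a heap pointer
    Tiny(Small),
}

fn main() {
    // Boxing the big variant shrinks every value of the enum, at the
    // cost of one allocation when the large variant is constructed.
    assert!(size_of::<Boxed>() < size_of::<Unboxed>());
    println!("{} vs {} bytes", size_of::<Unboxed>(), size_of::<Boxed>());
}
```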

View file

@@ -75,7 +75,7 @@ impl Attrs {
        db: &dyn DefDatabase,
        v: VariantId,
    ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
-        let _p = profile::span("fields_attrs_query");
+        let _p = tracing::span!(tracing::Level::INFO, "fields_attrs_query").entered();
        // FIXME: There should be some proper form of mapping between item tree field ids and hir field ids
        let mut res = ArenaMap::default();
@@ -322,7 +322,7 @@ impl AttrsWithOwner {
    }

    pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
-        let _p = profile::span("attrs_query");
+        let _p = tracing::span!(tracing::Level::INFO, "attrs_query").entered();
        // FIXME: this should use `Trace` to avoid duplication in `source_map` below
        let raw_attrs = match def {
            AttrDefId::ModuleId(module) => {

View file

@@ -122,7 +122,7 @@ impl Body {
        db: &dyn DefDatabase,
        def: DefWithBodyId,
    ) -> (Arc<Body>, Arc<BodySourceMap>) {
-        let _p = profile::span("body_with_source_map_query");
+        let _p = tracing::span!(tracing::Level::INFO, "body_with_source_map_query").entered();
        let mut params = None;

        let mut is_async_fn = false;

View file

@@ -33,7 +33,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
            }
        )
    }),
-    DefWithBodyId::InTypeConstId(_) => format!("In type const = "),
+    DefWithBodyId::InTypeConstId(_) => "In type const = ".to_string(),
    DefWithBodyId::VariantId(it) => {
        let loc = it.lookup(db);
        let enum_loc = loc.parent.lookup(db);

View file

@@ -256,7 +256,7 @@ impl SsrError {
"##,
    );

-    assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
+    assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]);
    expect![[r#"
        fn main() {
            _ = $crate::error::SsrError::new(
@@ -309,7 +309,7 @@ fn f() {
"#,
    );

-    let (_, source_map) = db.body_with_source_map(def.into());
+    let (_, source_map) = db.body_with_source_map(def);
    assert_eq!(source_map.diagnostics(), &[]);

    for (_, def_map) in body.blocks(&db) {

View file

@@ -340,7 +340,7 @@ impl ImplData {
        db: &dyn DefDatabase,
        id: ImplId,
    ) -> (Arc<ImplData>, DefDiagnostics) {
-        let _p = profile::span("impl_data_with_diagnostics_query");
+        let _p = tracing::span!(tracing::Level::INFO, "impl_data_with_diagnostics_query").entered();
        let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);

        let item_tree = tree_id.item_tree(db);
@@ -782,7 +782,7 @@ impl<'a> AssocItemCollector<'a> {
                self.diagnostics.push(DefDiagnostic::macro_expansion_parse_error(
                    self.module_id.local_id,
                    error_call_kind(),
-                    errors.into(),
+                    errors,
                ));
            }

View file

@@ -1,9 +1,10 @@
//! Defines database & queries for name resolution.
-use base_db::{salsa, CrateId, SourceDatabase, Upcast};
+use base_db::{salsa, CrateId, FileId, SourceDatabase, Upcast};
use either::Either;
use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId};
use intern::Interned;
use la_arena::ArenaMap;
+use span::MacroCallId;
use syntax::{ast, AstPtr};
use triomphe::Arc;
@@ -234,10 +235,26 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
    fn crate_notable_traits(&self, krate: CrateId) -> Option<Arc<[TraitId]>>;

    fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
+
+    fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, FileId)>;
+}
+
+// return: macro call id and include file id
+fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId, FileId)> {
+    db.crate_def_map(krate)
+        .modules
+        .values()
+        .flat_map(|m| m.scope.iter_macro_invoc())
+        .filter_map(|invoc| {
+            db.lookup_intern_macro_call(*invoc.1)
+                .include_file_id(db.upcast(), *invoc.1)
+                .map(|x| (*invoc.1, x))
+        })
+        .collect()
}

fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
-    let _p = profile::span("crate_def_map:wait");
+    let _p = tracing::span!(tracing::Level::INFO, "crate_def_map:wait").entered();
    db.crate_def_map_query(krate)
}

View file

@@ -13,7 +13,7 @@ use crate::{
    item_scope::ItemInNs,
    nameres::DefMap,
    path::{ModPath, PathKind},
-    visibility::{Visibility, VisibilityExplicity},
+    visibility::{Visibility, VisibilityExplicitness},
    CrateRootModuleId, ModuleDefId, ModuleId,
};
@@ -26,7 +26,7 @@ pub fn find_path(
    prefer_no_std: bool,
    prefer_prelude: bool,
) -> Option<ModPath> {
-    let _p = profile::span("find_path");
+    let _p = tracing::span!(tracing::Level::INFO, "find_path").entered();
    find_path_inner(FindPathCtx { db, prefixed: None, prefer_no_std, prefer_prelude }, item, from)
}
@@ -38,7 +38,7 @@ pub fn find_path_prefixed(
    prefer_no_std: bool,
    prefer_prelude: bool,
) -> Option<ModPath> {
-    let _p = profile::span("find_path_prefixed");
+    let _p = tracing::span!(tracing::Level::INFO, "find_path_prefixed").entered();
    find_path_inner(
        FindPathCtx { db, prefixed: Some(prefix_kind), prefer_no_std, prefer_prelude },
        item,
@@ -497,7 +497,7 @@ fn find_local_import_locations(
    item: ItemInNs,
    from: ModuleId,
) -> Vec<(ModuleId, Name)> {
-    let _p = profile::span("find_local_import_locations");
+    let _p = tracing::span!(tracing::Level::INFO, "find_local_import_locations").entered();

    // `from` can import anything below `from` with visibility of at least `from`, and anything
    // above `from` with any visibility. That means we do not need to descend into private siblings
@@ -544,11 +544,11 @@ fn find_local_import_locations(
            if let Some((name, vis, declared)) = data.scope.name_of(item) {
                if vis.is_visible_from(db, from) {
                    let is_pub_or_explicit = match vis {
-                        Visibility::Module(_, VisibilityExplicity::Explicit) => {
+                        Visibility::Module(_, VisibilityExplicitness::Explicit) => {
                            cov_mark::hit!(explicit_private_imports);
                            true
                        }
-                        Visibility::Module(_, VisibilityExplicity::Implicit) => {
+                        Visibility::Module(_, VisibilityExplicitness::Implicit) => {
                            cov_mark::hit!(discount_private_imports);
                            false
                        }

View file

@@ -373,7 +373,7 @@ impl GenericParams {
        db: &dyn DefDatabase,
        def: GenericDefId,
    ) -> Interned<GenericParams> {
-        let _p = profile::span("generic_params_query");
+        let _p = tracing::span!(tracing::Level::INFO, "generic_params_query").entered();

        let krate = def.module(db).krate;
        let cfg_options = db.crate_graph();

View file

@@ -166,6 +166,7 @@ enum PositionUsedAs {
}
use PositionUsedAs::*;

+#[allow(clippy::unnecessary_lazy_evaluations)]
pub(crate) fn parse(
    s: &ast::String,
    fmt_snippet: Option<String>,
@@ -177,9 +178,9 @@ pub(crate) fn parse(
    let text = s.text_without_quotes();
    let str_style = match s.quote_offsets() {
        Some(offsets) => {
-            let raw = u32::from(offsets.quotes.0.len()) - 1;
+            let raw = usize::from(offsets.quotes.0.len()) - 1;
            // subtract 1 for the `r` prefix
-            (raw != 0).then(|| raw as usize - 1)
+            (raw != 0).then(|| raw - 1)
        }
        None => None,
    };
@@ -214,7 +215,7 @@ pub(crate) fn parse(
    let mut used = vec![false; args.explicit_args().len()];
    let mut invalid_refs = Vec::new();
-    let mut numeric_refences_to_named_arg = Vec::new();
+    let mut numeric_references_to_named_arg = Vec::new();

    enum ArgRef<'a> {
        Index(usize),
@@ -231,7 +232,7 @@ pub(crate) fn parse(
                    used[index] = true;
                    if arg.kind.ident().is_some() {
                        // This was a named argument, but it was used as a positional argument.
-                        numeric_refences_to_named_arg.push((index, span, used_as));
+                        numeric_references_to_named_arg.push((index, span, used_as));
                    }
                    Ok(index)
                } else {
@@ -432,7 +433,7 @@ pub(crate) fn parse(
        }
    }

-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct FormatArgumentsCollector {
    arguments: Vec<FormatArgument>,
    num_unnamed_args: usize,
@@ -451,7 +452,7 @@ impl FormatArgumentsCollector {
    }

    pub fn new() -> Self {
-        Self { arguments: vec![], names: vec![], num_unnamed_args: 0, num_explicit_args: 0 }
+        Default::default()
    }

    pub fn add(&mut self, arg: FormatArgument) -> usize {

View file

@@ -75,7 +75,7 @@ impl ImportMap {
    }

    pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
-        let _p = profile::span("import_map_query");
+        let _p = tracing::span!(tracing::Level::INFO, "import_map_query").entered();

        let map = Self::collect_import_map(db, krate);
@@ -126,7 +126,7 @@ impl ImportMap {
    }

    fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
-        let _p = profile::span("collect_import_map");
+        let _p = tracing::span!(tracing::Level::INFO, "collect_import_map").entered();

        let def_map = db.crate_def_map(krate);
        let mut map = FxIndexMap::default();
@@ -216,7 +216,7 @@ impl ImportMap {
        is_type_in_ns: bool,
        trait_import_info: &ImportInfo,
    ) {
-        let _p = profile::span("collect_trait_assoc_items");
+        let _p = tracing::span!(tracing::Level::INFO, "collect_trait_assoc_items").entered();
        for &(ref assoc_item_name, item) in &db.trait_data(tr).items {
            let module_def_id = match item {
                AssocItemId::FunctionId(f) => ModuleDefId::from(f),
@@ -297,7 +297,7 @@ impl SearchMode {
            SearchMode::Exact => candidate.eq_ignore_ascii_case(query),
            SearchMode::Prefix => {
                query.len() <= candidate.len() && {
-                    let prefix = &candidate[..query.len() as usize];
+                    let prefix = &candidate[..query.len()];
                    if case_sensitive {
                        prefix == query
                    } else {
@@ -396,9 +396,9 @@ impl Query {
pub fn search_dependencies(
    db: &dyn DefDatabase,
    krate: CrateId,
-    ref query: Query,
+    query: &Query,
) -> FxHashSet<ItemInNs> {
-    let _p = profile::span("search_dependencies").detail(|| format!("{query:?}"));
+    let _p = tracing::span!(tracing::Level::INFO, "search_dependencies", ?query).entered();

    let graph = db.crate_graph();
@@ -446,7 +446,7 @@ fn search_maps(
            let end = (value & 0xFFFF_FFFF) as usize;
            let start = (value >> 32) as usize;
            let ImportMap { item_to_info_map, importables, .. } = &*import_maps[import_map_idx];
-            let importables = &importables[start as usize..end];
+            let importables = &importables[start..end];

            let iter = importables
                .iter()
@@ -516,7 +516,7 @@ mod tests {
        })
        .expect("could not find crate");

-        let actual = search_dependencies(db.upcast(), krate, query)
+        let actual = search_dependencies(db.upcast(), krate, &query)
            .into_iter()
            .filter_map(|dependency| {
                let dependency_krate = dependency.krate(db.upcast())?;

View file

@@ -17,7 +17,7 @@ use syntax::ast;
use crate::{
    db::DefDatabase,
    per_ns::PerNs,
-    visibility::{Visibility, VisibilityExplicity},
+    visibility::{Visibility, VisibilityExplicitness},
    AdtId, BuiltinType, ConstId, ExternCrateId, HasModule, ImplId, LocalModuleId, Lookup, MacroId,
    ModuleDefId, ModuleId, TraitId, UseId,
};
@@ -336,6 +336,12 @@ impl ItemScope {
    pub(crate) fn macro_invoc(&self, call: AstId<ast::MacroCall>) -> Option<MacroCallId> {
        self.macro_invocations.get(&call).copied()
    }
+
+    pub(crate) fn iter_macro_invoc(
+        &self,
+    ) -> impl Iterator<Item = (&AstId<ast::MacroCall>, &MacroCallId)> {
+        self.macro_invocations.iter()
+    }
}

impl ItemScope {
@@ -647,14 +653,16 @@ impl ItemScope {
            .map(|(_, vis, _)| vis)
            .chain(self.values.values_mut().map(|(_, vis, _)| vis))
            .chain(self.unnamed_trait_imports.values_mut().map(|(vis, _)| vis))
-            .for_each(|vis| *vis = Visibility::Module(this_module, VisibilityExplicity::Implicit));
+            .for_each(|vis| {
+                *vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit)
+            });

        for (mac, vis, import) in self.macros.values_mut() {
            if matches!(mac, MacroId::ProcMacroId(_) if import.is_none()) {
                continue;
            }

-            *vis = Visibility::Module(this_module, VisibilityExplicity::Implicit);
+            *vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit);
        }
    }

View file

@@ -69,7 +69,7 @@ use crate::{
    generics::{GenericParams, LifetimeParamData, TypeOrConstParamData},
    path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind},
    type_ref::{Mutability, TraitRef, TypeBound, TypeRef},
-    visibility::{RawVisibility, VisibilityExplicity},
+    visibility::{RawVisibility, VisibilityExplicitness},
    BlockId, Lookup,
};
@@ -109,7 +109,8 @@ pub struct ItemTree {

impl ItemTree {
    pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
-        let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}"));
+        let _p = tracing::span!(tracing::Level::INFO, "file_item_tree_query", ?file_id).entered();
+
        let syntax = db.parse_or_expand(file_id);

        let ctx = lower::Ctx::new(db, file_id);
@@ -252,10 +253,10 @@ impl ItemVisibilities {
            RawVisibility::Public => RawVisibilityId::PUB,
            RawVisibility::Module(path, explicitiy) if path.segments().is_empty() => {
                match (&path.kind, explicitiy) {
-                    (PathKind::Super(0), VisibilityExplicity::Explicit) => {
+                    (PathKind::Super(0), VisibilityExplicitness::Explicit) => {
                        RawVisibilityId::PRIV_EXPLICIT
                    }
-                    (PathKind::Super(0), VisibilityExplicity::Implicit) => {
+                    (PathKind::Super(0), VisibilityExplicitness::Implicit) => {
                        RawVisibilityId::PRIV_IMPLICIT
                    }
                    (PathKind::Crate, _) => RawVisibilityId::PUB_CRATE,
@@ -269,11 +270,11 @@ impl ItemVisibilities {

static VIS_PUB: RawVisibility = RawVisibility::Public;
static VIS_PRIV_IMPLICIT: RawVisibility =
-    RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicity::Implicit);
+    RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicitness::Implicit);
static VIS_PRIV_EXPLICIT: RawVisibility =
-    RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicity::Explicit);
+    RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicitness::Explicit);
static VIS_PUB_CRATE: RawVisibility =
-    RawVisibility::Module(ModPath::from_kind(PathKind::Crate), VisibilityExplicity::Explicit);
+    RawVisibility::Module(ModPath::from_kind(PathKind::Crate), VisibilityExplicitness::Explicit);

#[derive(Default, Debug, Eq, PartialEq)]
struct ItemTreeData {

View file

@@ -91,7 +91,7 @@ impl LangItems {
        db: &dyn DefDatabase,
        krate: CrateId,
    ) -> Option<Arc<LangItems>> {
-        let _p = profile::span("crate_lang_items_query");
+        let _p = tracing::span!(tracing::Level::INFO, "crate_lang_items_query").entered();

        let mut lang_items = LangItems::default();
@@ -163,7 +163,7 @@ impl LangItems {
        start_crate: CrateId,
        item: LangItem,
    ) -> Option<LangItemTarget> {
-        let _p = profile::span("lang_item_query");
+        let _p = tracing::span!(tracing::Level::INFO, "lang_item_query").entered();
        if let Some(target) =
            db.crate_lang_items(start_crate).and_then(|it| it.items.get(&item).copied())
        {
@@ -183,7 +183,7 @@ impl LangItems {
    ) where
        T: Into<AttrDefId> + Copy,
    {
-        let _p = profile::span("collect_lang_item");
+        let _p = tracing::span!(tracing::Level::INFO, "collect_lang_item").entered();
        if let Some(lang_item) = lang_attr(db, item.into()) {
            self.items.entry(lang_item).or_insert_with(|| constructor(item));
        }
@@ -199,7 +199,7 @@ pub(crate) fn notable_traits_in_deps(
    db: &dyn DefDatabase,
    krate: CrateId,
) -> Arc<[Arc<[TraitId]>]> {
-    let _p = profile::span("notable_traits_in_deps").detail(|| format!("{krate:?}"));
+    let _p = tracing::span!(tracing::Level::INFO, "notable_traits_in_deps", ?krate).entered();
    let crate_graph = db.crate_graph();

    Arc::from_iter(
} }
pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Option<Arc<[TraitId]>> { pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Option<Arc<[TraitId]>> {
let _p = profile::span("crate_notable_traits").detail(|| format!("{krate:?}")); let _p = tracing::span!(tracing::Level::INFO, "crate_notable_traits", ?krate).entered();
let mut traits = Vec::new(); let mut traits = Vec::new();

View file

@@ -745,7 +745,7 @@ impl InTypeConstId {
    }
}

-/// A constant, which might appears as a const item, an annonymous const block in expressions
+/// A constant, which might appears as a const item, an anonymous const block in expressions
/// or patterns, or as a constant in types with const generics.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum GeneralConstId {

View file

@@ -460,13 +460,13 @@ fn test_concat_expand() {
#[rustc_builtin_macro]
macro_rules! concat {}

-fn main() { concat!("foo", "r", 0, r#"bar"#, "\n", false, '"', '\0'); }
+fn main() { concat!("fo", "o", 0, r#"bar"#, "\n", false, '"', '\0'); }
"##,
        expect![[r##"
#[rustc_builtin_macro]
macro_rules! concat {}

-fn main() { "foor0bar\nfalse\"\u{0}"; }
+fn main() { "foo0bar\nfalse\"\u{0}"; }
"##]],
    );
}

View file

@@ -72,7 +72,7 @@ fn main() {
}
"#]],
    );
-    // FIXME we should ahev testing infra for multi level expansion tests
+    // FIXME we should have testing infra for multi level expansion tests
    check(
        r#"
macro_rules! __rust_force_expr {
@@ -544,11 +544,11 @@ fn test_proptest_arbitrary() {
    check(
        r#"
macro_rules! arbitrary {
-    ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty;
+    ([$($bounds : tt)*] $typ: ty, $strategy: ty, $params: ty;
        $args: ident => $logic: expr) => {
        impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ {
            type Parameters = $params;
-            type Strategy = $strat;
+            type Strategy = $strategy;
            fn arbitrary_with($args: Self::Parameters) -> Self::Strategy {
                $logic
            }
@@ -569,11 +569,11 @@ arbitrary!(
"#,
        expect![[r#"
macro_rules! arbitrary {
-    ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty;
+    ([$($bounds : tt)*] $typ: ty, $strategy: ty, $params: ty;
        $args: ident => $logic: expr) => {
        impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ {
            type Parameters = $params;
-            type Strategy = $strat;
+            type Strategy = $strategy;
            fn arbitrary_with($args: Self::Parameters) -> Self::Strategy {
                $logic
            }

View file

@@ -25,7 +25,7 @@ use hir_expand::{
    InFile, MacroFileId, MacroFileIdExt,
};
use span::Span;
-use stdx::format_to;
+use stdx::{format_to, format_to_acc};
use syntax::{
    ast::{self, edit::IndentLevel},
    AstNode,
@@ -149,8 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
        if tree {
            let tree = format!("{:#?}", parse.syntax_node())
                .split_inclusive('\n')
-                .map(|line| format!("// {line}"))
-                .collect::<String>();
+                .fold(String::new(), |mut acc, line| format_to_acc!(acc, "// {line}"));
            format_to!(expn_text, "\n{}", tree)
        }
        let range = call.syntax().text_range();
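This rewrite satisfies clippy's `format_collect` lint, which this update also removes from the allow-list in the workspace `Cargo.toml`: `map(|l| format!(...)).collect::<String>()` allocates a fresh `String` per element, while folding writes everything into one buffer. `format_to_acc!` is rust-analyzer's own helper in `stdx`; a minimal sketch of the same idea using plain `std::fmt::Write`:

```rust
use std::fmt::Write;

fn comment_out(text: &str) -> String {
    // Instead of allocating a new String per line with `format!`,
    // fold into a single buffer. Writing to a String cannot fail,
    // so the `Result` from `write!` is deliberately ignored.
    text.split_inclusive('\n').fold(String::new(), |mut acc, line| {
        let _ = write!(acc, "// {line}");
        acc
    })
}

fn main() {
    assert_eq!(comment_out("a\nb"), "// a\n// b");
}
```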

View file

@@ -79,7 +79,7 @@ use crate::{
    nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode},
    path::ModPath,
    per_ns::PerNs,
-    visibility::{Visibility, VisibilityExplicity},
+    visibility::{Visibility, VisibilityExplicitness},
    AstId, BlockId, BlockLoc, CrateRootModuleId, EnumId, EnumVariantId, ExternCrateId, FunctionId,
    LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
};
@@ -306,9 +306,10 @@ impl DefMap {
    pub const ROOT: LocalModuleId = LocalModuleId::from_raw(la_arena::RawIdx::from_u32(0));

    pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
-        let _p = profile::span("crate_def_map_query").detail(|| {
-            db.crate_graph()[krate].display_name.as_deref().unwrap_or_default().to_string()
-        });
+        let crate_graph = db.crate_graph();
+        let krate_name = crate_graph[krate].display_name.as_deref().unwrap_or_default();
+
+        let _p = tracing::span!(tracing::Level::INFO, "crate_def_map_query", ?krate_name).entered();

        let crate_graph = db.crate_graph();
@@ -335,7 +336,7 @@ impl DefMap {
            // this visibility for anything outside IDE, so that's probably OK.
            let visibility = Visibility::Module(
                ModuleId { krate, local_id, block: None },
-                VisibilityExplicity::Implicit,
+                VisibilityExplicitness::Implicit,
            );
            let module_data = ModuleData::new(
                ModuleOrigin::BlockExpr { block: block.ast_id, id: block_id },

View file

@@ -273,7 +273,7 @@ struct DefCollector<'a> {
impl DefCollector<'_> {
    fn seed_with_top_level(&mut self) {
-        let _p = profile::span("seed_with_top_level");
+        let _p = tracing::span!(tracing::Level::INFO, "seed_with_top_level").entered();

        let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
        let item_tree = self.db.file_item_tree(file_id.into());
@@ -401,7 +401,7 @@ impl DefCollector<'_> {
    }

    fn resolution_loop(&mut self) {
-        let _p = profile::span("DefCollector::resolution_loop");
+        let _p = tracing::span!(tracing::Level::INFO, "DefCollector::resolution_loop").entered();

        // main name resolution fixed-point loop.
        let mut i = 0;
@@ -410,7 +410,7 @@ impl DefCollector<'_> {
            self.db.unwind_if_cancelled();

            {
-                let _p = profile::span("resolve_imports loop");
+                let _p = tracing::span!(tracing::Level::INFO, "resolve_imports loop").entered();

                'resolve_imports: loop {
                    if self.resolve_imports() == ReachedFixedPoint::Yes {
@@ -436,7 +436,7 @@ impl DefCollector<'_> {
    }

    fn collect(&mut self) {
-        let _p = profile::span("DefCollector::collect");
+        let _p = tracing::span!(tracing::Level::INFO, "DefCollector::collect").entered();

        self.resolution_loop();
@@ -792,8 +792,8 @@ impl DefCollector<'_> {
    }

    fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
-        let _p = profile::span("resolve_import")
-            .detail(|| format!("{}", import.path.display(self.db.upcast())));
+        let _p = tracing::span!(tracing::Level::INFO, "resolve_import", import_path = %import.path.display(self.db.upcast()))
+            .entered();
        tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
        match import.source {
            ImportSource::ExternCrate { .. } => {
@@ -856,7 +856,7 @@ impl DefCollector<'_> {
    }

    fn record_resolved_import(&mut self, directive: &ImportDirective) {
-        let _p = profile::span("record_resolved_import");
+        let _p = tracing::span!(tracing::Level::INFO, "record_resolved_import").entered();

        let module_id = directive.module_id;
        let import = &directive.import;
@@ -1430,7 +1430,7 @@ impl DefCollector<'_> {
    fn finish(mut self) -> DefMap {
        // Emit diagnostics for all remaining unexpanded macros.

-        let _p = profile::span("DefCollector::finish");
+        let _p = tracing::span!(tracing::Level::INFO, "DefCollector::finish").entered();

        for directive in &self.unresolved_macros {
            match &directive.kind {
@@ -1924,7 +1924,7 @@ impl ModCollector<'_, '_> {
                item_tree: self.item_tree,
                mod_dir,
            }
-            .collect_in_top_module(&*items);
+            .collect_in_top_module(items);
            if is_macro_use {
                self.import_all_legacy_macros(module_id);
            }

View file

@@ -87,7 +87,7 @@ impl DefMap {
        within_impl: bool,
    ) -> Option<Visibility> {
        let mut vis = match visibility {
-            RawVisibility::Module(path, explicity) => {
+            RawVisibility::Module(path, explicitness) => {
                let (result, remaining) =
                    self.resolve_path(db, original_module, path, BuiltinShadowMode::Module, None);
                if remaining.is_some() {
@@ -95,7 +95,7 @@ impl DefMap {
                }
                let types = result.take_types()?;
                match types {
-                    ModuleDefId::ModuleId(m) => Visibility::Module(m, *explicity),
+                    ModuleDefId::ModuleId(m) => Visibility::Module(m, *explicitness),
                    // error: visibility needs to refer to module
                    _ => {
                        return None;
@@ -269,7 +269,7 @@ impl DefMap {
            stdx::never!(module.is_block_module());

            if self.block != def_map.block {
-                // If we have a different `DefMap` from `self` (the orignal `DefMap` we started
+                // If we have a different `DefMap` from `self` (the original `DefMap` we started
                // with), resolve the remaining path segments in that `DefMap`.
                let path =
                    ModPath::from_segments(PathKind::Super(0), path.segments().iter().cloned());
@@ -475,7 +475,7 @@ impl DefMap {
        let macro_use_prelude = || {
            self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, _extern_crate)| {
                PerNs::macros(
-                    it.into(),
+                    it,
                    Visibility::Public,
                    // FIXME?
                    None, // extern_crate.map(ImportOrExternCrate::ExternCrate),
@@ -540,7 +540,7 @@ impl DefMap {
        }
    }

-    /// Given a block module, returns its nearest non-block module and the `DefMap` it blongs to.
+    /// Given a block module, returns its nearest non-block module and the `DefMap` it belongs to.
    fn adjust_to_nearest_non_block_module(
        db: &dyn DefDatabase,
        def_map: &DefMap,

View file

@@ -16,19 +16,13 @@ pub enum Namespace {
    Macros,
}

-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
pub struct PerNs {
    pub types: Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
    pub values: Option<(ModuleDefId, Visibility, Option<ImportId>)>,
    pub macros: Option<(MacroId, Visibility, Option<ImportId>)>,
}

-impl Default for PerNs {
-    fn default() -> Self {
-        PerNs { types: None, values: None, macros: None }
-    }
-}
-
impl PerNs {
    pub fn none() -> PerNs {
        PerNs { types: None, values: None, macros: None }
@@ -92,7 +86,7 @@ impl PerNs {
    }

    pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs {
-        let _p = profile::span("PerNs::filter_visibility");
+        let _p = tracing::span!(tracing::Level::INFO, "PerNs::filter_visibility").entered();
        PerNs {
            types: self.types.filter(|&(_, v, _)| f(v)),
            values: self.values.filter(|&(_, v, _)| f(v)),
@@ -125,19 +119,17 @@ impl PerNs {
    }

    pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> {
-        let _p = profile::span("PerNs::iter_items");
+        let _p = tracing::span!(tracing::Level::INFO, "PerNs::iter_items").entered();
        self.types
            .map(|it| (ItemInNs::Types(it.0), it.2))
            .into_iter()
            .chain(
                self.values
-                    .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import)))
-                    .into_iter(),
+                    .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import))),
            )
            .chain(
                self.macros
-                    .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import)))
-                    .into_iter(),
+                    .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import))),
            )
    }
}
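Dropping the `.into_iter()` calls works because `Iterator::chain` accepts any `IntoIterator` with a matching item type, and `Option<T>` implements `IntoIterator`, yielding zero or one item. A minimal sketch:

```rust
fn main() {
    let a = Some(1);
    let b: Option<i32> = None;

    // `chain` takes `U: IntoIterator<Item = Self::Item>`, so an
    // `Option` can be passed directly as the argument; only the
    // leading `.into_iter()` that starts the chain is required.
    let items: Vec<i32> = a.into_iter().chain(b).collect();
    assert_eq!(items, vec![1]);
}
```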

View file

@@ -41,13 +41,13 @@ impl Default for TestDB {

impl Upcast<dyn ExpandDatabase> for TestDB {
    fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
-        &*self
+        self
    }
}

impl Upcast<dyn DefDatabase> for TestDB {
    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
-        &*self
+        self
    }
}
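The `&*self` to `self` change follows clippy's `borrow_deref_ref` lint (another entry removed from the allow-list above): dereferencing a reference only to re-borrow it is a no-op, and the plain reference coerces to the trait object on its own. A sketch with stand-in types:

```rust
struct Db;

trait ExpandLike {
    fn ping(&self) {}
}

impl ExpandLike for Db {}

fn upcast(db: &Db) -> &dyn ExpandLike {
    // `&*db` would borrow the target of the reference and immediately
    // re-borrow it; returning `db` relies on the same unsized coercion
    // from `&Db` to `&dyn ExpandLike` without the detour.
    db
}

fn main() {
    upcast(&Db).ping();
}
```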

View file

@@ -20,14 +20,17 @@ use crate::{
pub enum RawVisibility {
    /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is
    /// equivalent to `pub(self)`.
-    Module(ModPath, VisibilityExplicity),
+    Module(ModPath, VisibilityExplicitness),
    /// `pub`.
    Public,
}

impl RawVisibility {
    pub(crate) const fn private() -> RawVisibility {
-        RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicity::Implicit)
+        RawVisibility::Module(
+            ModPath::from_kind(PathKind::Super(0)),
+            VisibilityExplicitness::Implicit,
+        )
    }

    pub(crate) fn from_ast(
@@ -53,19 +56,19 @@ impl RawVisibility {
                None => return RawVisibility::private(),
                Some(path) => path,
            };
-            RawVisibility::Module(path, VisibilityExplicity::Explicit)
+            RawVisibility::Module(path, VisibilityExplicitness::Explicit)
        }
        ast::VisibilityKind::PubCrate => {
            let path = ModPath::from_kind(PathKind::Crate);
-            RawVisibility::Module(path, VisibilityExplicity::Explicit)
+            RawVisibility::Module(path, VisibilityExplicitness::Explicit)
        }
        ast::VisibilityKind::PubSuper => {
            let path = ModPath::from_kind(PathKind::Super(1));
-            RawVisibility::Module(path, VisibilityExplicity::Explicit)
+            RawVisibility::Module(path, VisibilityExplicitness::Explicit)
        }
        ast::VisibilityKind::PubSelf => {
            let path = ModPath::from_kind(PathKind::Super(0));
-            RawVisibility::Module(path, VisibilityExplicity::Explicit)
+            RawVisibility::Module(path, VisibilityExplicitness::Explicit)
        }
        ast::VisibilityKind::Pub => RawVisibility::Public,
    }
@@ -85,7 +88,7 @@ impl RawVisibility {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Visibility {
    /// Visibility is restricted to a certain module.
-    Module(ModuleId, VisibilityExplicity),
+    Module(ModuleId, VisibilityExplicitness),
    /// Visibility is unrestricted.
    Public,
}
@ -206,12 +209,12 @@ impl Visibility {
/// Whether the item was imported through `pub(crate) use` or just `use`. /// Whether the item was imported through `pub(crate) use` or just `use`.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum VisibilityExplicity { pub enum VisibilityExplicitness {
Explicit, Explicit,
Implicit, Implicit,
} }
impl VisibilityExplicity { impl VisibilityExplicitness {
pub fn is_explicit(&self) -> bool { pub fn is_explicit(&self) -> bool {
matches!(self, Self::Explicit) matches!(self, Self::Explicit)
} }
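Aside: the rename to `VisibilityExplicitness` only touches the type name; behaviour is unchanged. A self-contained sketch of how the flag reads after the rename (simplified, not the actual `hir-def` definition):

    #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
    pub enum VisibilityExplicitness {
        Explicit, // visibility written in source, e.g. `pub(crate)`
        Implicit, // default (private) visibility
    }

    impl VisibilityExplicitness {
        pub fn is_explicit(&self) -> bool {
            matches!(self, Self::Explicit)
        }
    }

    fn main() {
        assert!(VisibilityExplicitness::Explicit.is_explicit());
        assert!(!VisibilityExplicitness::Implicit.is_explicit());
    }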


@ -230,12 +230,12 @@ impl Attr {
) )
) )
}) })
.unwrap_or_else(|| tt.len()); .unwrap_or(tt.len());
let (path, input) = tt.split_at(path_end); let (path, input) = tt.split_at(path_end);
let path = Interned::new(ModPath::from_tt(db, path)?); let path = Interned::new(ModPath::from_tt(db, path)?);
let input = match input.get(0) { let input = match input.first() {
Some(tt::TokenTree::Subtree(tree)) => { Some(tt::TokenTree::Subtree(tree)) => {
Some(Interned::new(AttrInput::TokenTree(Box::new(tree.clone())))) Some(Interned::new(AttrInput::TokenTree(Box::new(tree.clone()))))
} }
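Aside: both changes in this hunk are clippy idioms: `unwrap_or` takes an eagerly available fallback (no closure needed), and `slice::first` replaces `get(0)`. Tiny self-contained check:

    fn main() {
        let tt = [b'a', b'b', b'c'];
        let path_end = tt.iter().position(|&t| t == b'b').unwrap_or(tt.len());
        assert_eq!(path_end, 1);
        assert_eq!(tt.first(), Some(&b'a'));
    }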


@ -80,6 +80,9 @@ pub trait ExpandDatabase: SourceDatabase {
#[salsa::invoke(SpanMap::new)] #[salsa::invoke(SpanMap::new)]
fn span_map(&self, file_id: HirFileId) -> SpanMap; fn span_map(&self, file_id: HirFileId) -> SpanMap;
#[salsa::transparent]
#[salsa::invoke(crate::span_map::expansion_span_map)]
fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
#[salsa::invoke(crate::span_map::real_span_map)] #[salsa::invoke(crate::span_map::real_span_map)]
fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>; fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
@ -280,7 +283,7 @@ fn parse_macro_expansion(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
macro_file: MacroFileId, macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> { ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = profile::span("parse_macro_expansion"); let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let expand_to = loc.expand_to(); let expand_to = loc.expand_to();
let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc); let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
@ -501,7 +504,7 @@ fn macro_expand(
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
loc: MacroCallLoc, loc: MacroCallLoc,
) -> ExpandResult<CowArc<tt::Subtree>> { ) -> ExpandResult<CowArc<tt::Subtree>> {
let _p = profile::span("macro_expand"); let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered();
let ExpandResult { value: tt, mut err } = match loc.def.kind { let ExpandResult { value: tt, mut err } = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc), MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
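Aside: the `profile::span(..)` → `tracing::span!(..)` migration seen here (and repeated throughout this commit) relies on `Span::entered`, which returns a guard that exits the span when dropped. A minimal runnable sketch, assuming the `tracing` and `tracing-subscriber` crates as dependencies:

    use tracing::Level;

    fn parse_macro_expansion_like() {
        // The span stays entered until `_p` is dropped at scope end.
        let _p = tracing::span!(Level::INFO, "parse_macro_expansion").entered();
        // ... expansion work would happen here ...
    }

    fn main() {
        // Spans are only recorded if a subscriber is installed.
        tracing_subscriber::fmt().with_max_level(Level::INFO).init();
        parse_macro_expansion_like();
    }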


@ -5,7 +5,7 @@ use either::Either;
use span::{FileId, FileRange, HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId}; use span::{FileId, FileRange, HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId};
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize}; use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};
use crate::{db, ExpansionInfo, MacroFileIdExt}; use crate::{db, map_node_range_up, span_for_offset, MacroFileIdExt};
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree. /// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
/// ///
@ -147,7 +147,7 @@ impl InFile<&SyntaxNode> {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => { HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) = if let Some((res, ctxt)) =
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{ {
// FIXME: Figure out an API that makes proper use of ctx, this only exists to // FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour. // keep pre-token map rewrite behaviour.
@ -163,12 +163,15 @@ impl InFile<&SyntaxNode> {
} }
/// Falls back to the macro call range if the node cannot be mapped up fully. /// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange { pub fn original_file_range_with_macro_call_body(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
match self.file_id.repr() { match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => { HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) = if let Some((res, ctxt)) =
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{ {
// FIXME: Figure out an API that makes proper use of ctx, this only exists to // FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour. // keep pre-token map rewrite behaviour.
@ -193,7 +196,7 @@ impl InFile<&SyntaxNode> {
Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT)) Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
} }
HirFileIdRepr::MacroFile(mac_file) => { HirFileIdRepr::MacroFile(mac_file) => {
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
} }
} }
} }
@ -215,7 +218,7 @@ impl InFile<&SyntaxNode> {
} }
let (FileRange { file_id, range }, ctx) = let (FileRange { file_id, range }, ctx) =
ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?; map_node_range_up(db, &db.expansion_span_map(file_id), self.value.text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to // FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour. // keep pre-token map rewrite behaviour.
@ -246,8 +249,11 @@ impl InFile<SyntaxToken> {
match self.file_id.repr() { match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => { HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file) let (range, ctxt) = span_for_offset(
.span_for_offset(db, self.value.text_range().start()); db,
&db.expansion_span_map(mac_file),
self.value.text_range().start(),
);
// FIXME: Figure out an API that makes proper use of ctx, this only exists to // FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour. // keep pre-token map rewrite behaviour.
@ -269,8 +275,11 @@ impl InFile<SyntaxToken> {
Some(FileRange { file_id, range: self.value.text_range() }) Some(FileRange { file_id, range: self.value.text_range() })
} }
HirFileIdRepr::MacroFile(mac_file) => { HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file) let (range, ctxt) = span_for_offset(
.span_for_offset(db, self.value.text_range().start()); db,
&db.expansion_span_map(mac_file),
self.value.text_range().start(),
);
// FIXME: Figure out an API that makes proper use of ctx, this only exists to // FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour. // keep pre-token map rewrite behaviour.
@ -286,7 +295,7 @@ impl InFile<SyntaxToken> {
impl InMacroFile<TextSize> { impl InMacroFile<TextSize> {
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) { pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value) span_for_offset(db, &db.expansion_span_map(self.file_id), self.value)
} }
} }
@ -300,7 +309,7 @@ impl InFile<TextRange> {
(FileRange { file_id, range: self.value }, SyntaxContextId::ROOT) (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
} }
HirFileIdRepr::MacroFile(mac_file) => { HirFileIdRepr::MacroFile(mac_file) => {
match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) { match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it, Some(it) => it,
None => { None => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
@ -315,7 +324,7 @@ impl InFile<TextRange> {
match self.file_id.repr() { match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => { HirFileIdRepr::MacroFile(mac_file) => {
match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) { match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some((it, SyntaxContextId::ROOT)) => it, Some((it, SyntaxContextId::ROOT)) => it,
_ => { _ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
@ -335,7 +344,7 @@ impl InFile<TextRange> {
Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)) Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
} }
HirFileIdRepr::MacroFile(mac_file) => { HirFileIdRepr::MacroFile(mac_file) => {
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
} }
} }
} }
@ -355,8 +364,11 @@ impl<N: AstNode> InFile<N> {
return None; return None;
} }
let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id) let (FileRange { file_id, range }, ctx) = map_node_range_up(
.map_node_range_up(db, self.value.syntax().text_range())?; db,
&db.expansion_span_map(file_id),
self.value.syntax().text_range(),
)?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to // FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour. // keep pre-token map rewrite behaviour.
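Aside: every call site in this file now follows the same shape: fetch the memoized span map once via `db.expansion_span_map(mac_file)`, then call a free function, instead of constructing a full `ExpansionInfo`. A toy, self-contained model of that refactor (stand-in types; the real ones live in hir-expand):

    use std::sync::Arc;

    struct SpanMap(Vec<(u32, u32)>); // (expansion offset, original offset)

    struct Db {
        cached: Arc<SpanMap>, // stands in for the salsa-memoized query result
    }

    impl Db {
        fn expansion_span_map(&self) -> Arc<SpanMap> {
            Arc::clone(&self.cached) // cheap: no per-call wrapper construction
        }
    }

    // Free-function style of map_node_range_up: the map is an argument.
    fn map_offset_up(map: &SpanMap, offset: u32) -> Option<u32> {
        map.0.iter().find(|(from, _)| *from == offset).map(|&(_, to)| to)
    }

    fn main() {
        let db = Db { cached: Arc::new(SpanMap(vec![(0, 100), (4, 104)])) };
        assert_eq!(map_offset_up(&db.expansion_span_map(), 4), Some(104));
    }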


@ -523,6 +523,24 @@ impl MacroCallLoc {
} }
} }
} }
pub fn include_file_id(
&self,
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> Option<FileId> {
if self.def.is_include() {
if let Some(eager) = &self.eager {
if let Ok(it) =
builtin_fn_macro::include_input_to_file_id(db, macro_call_id, &eager.arg)
{
return Some(it);
}
}
}
None
}
} }
impl MacroCallKind { impl MacroCallKind {
@ -659,6 +677,10 @@ impl ExpansionInfo {
Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?)) Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?))
} }
pub fn call_file(&self) -> HirFileId {
self.arg.file_id
}
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call. /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
pub fn map_range_down( pub fn map_range_down(
&self, &self,
@ -679,13 +701,7 @@ impl ExpansionInfo {
offset: TextSize, offset: TextSize,
) -> (FileRange, SyntaxContextId) { ) -> (FileRange, SyntaxContextId) {
debug_assert!(self.expanded.value.text_range().contains(offset)); debug_assert!(self.expanded.value.text_range().contains(offset));
let span = self.exp_map.span_at(offset); span_for_offset(db, &self.exp_map, offset)
let anchor_offset = db
.ast_id_map(span.anchor.file_id.into())
.get_erased(span.anchor.ast_id)
.text_range()
.start();
(FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
} }
/// Maps up the text range out of the expansion hierarchy back into the original file it's from. /// Maps up the text range out of the expansion hierarchy back into the original file it's from.
@ -695,27 +711,7 @@ impl ExpansionInfo {
range: TextRange, range: TextRange,
) -> Option<(FileRange, SyntaxContextId)> { ) -> Option<(FileRange, SyntaxContextId)> {
debug_assert!(self.expanded.value.text_range().contains_range(range)); debug_assert!(self.expanded.value.text_range().contains_range(range));
let mut spans = self.exp_map.spans_for_range(range); map_node_range_up(db, &self.exp_map, range)
let Span { range, anchor, ctx } = spans.next()?;
let mut start = range.start();
let mut end = range.end();
for span in spans {
if span.anchor != anchor || span.ctx != ctx {
return None;
}
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some((
FileRange {
file_id: anchor.file_id,
range: TextRange::new(start, end) + anchor_offset,
},
ctx,
))
} }
/// Maps up the text range out of the expansion into its macro call. /// Maps up the text range out of the expansion into its macro call.
@ -804,6 +800,47 @@ impl ExpansionInfo {
} }
} }
/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
pub fn map_node_range_up(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
range: TextRange,
) -> Option<(FileRange, SyntaxContextId)> {
let mut spans = exp_map.spans_for_range(range);
let Span { range, anchor, ctx } = spans.next()?;
let mut start = range.start();
let mut end = range.end();
for span in spans {
if span.anchor != anchor || span.ctx != ctx {
return None;
}
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some((
FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset },
ctx,
))
}
/// Looks up the span at the given offset.
pub fn span_for_offset(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
offset: TextSize,
) -> (FileRange, SyntaxContextId) {
let span = exp_map.span_at(offset);
let anchor_offset = db
.ast_id_map(span.anchor.file_id.into())
.get_erased(span.anchor.ast_id)
.text_range()
.start();
(FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
}
/// In Rust, macros expand token trees to token trees. When we want to turn a /// In Rust, macros expand token trees to token trees. When we want to turn a
/// token tree into an AST node, we need to figure out what kind of AST node we /// token tree into an AST node, we need to figure out what kind of AST node we
/// want: something like `foo` can be a type, an expression, or a pattern. /// want: something like `foo` can be a type, an expression, or a pattern.
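Aside: both extracted helpers rebase an expansion-local range by the anchor node's start in the original file; the arithmetic is just `range + anchor_offset`. Illustrated with plain numbers:

    // Toy version of the rebasing in span_for_offset/map_node_range_up:
    // spans store ranges relative to an anchor AST node, so adding the
    // anchor's start yields an absolute position in the real file.
    fn rebase(range: (u32, u32), anchor_offset: u32) -> (u32, u32) {
        (range.0 + anchor_offset, range.1 + anchor_offset)
    }

    fn main() {
        // A span covering bytes 4..9 of its anchor, anchor starting at byte 100.
        assert_eq!(rebase((4, 9), 100), (104, 109));
    }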


@ -232,7 +232,7 @@ fn convert_path(
ast::PathSegmentKind::SuperKw => { ast::PathSegmentKind::SuperKw => {
let mut deg = 1; let mut deg = 1;
let mut next_segment = None; let mut next_segment = None;
while let Some(segment) = segments.next() { for segment in segments.by_ref() {
match segment.kind()? { match segment.kind()? {
ast::PathSegmentKind::SuperKw => deg += 1, ast::PathSegmentKind::SuperKw => deg += 1,
ast::PathSegmentKind::Name(name) => { ast::PathSegmentKind::Name(name) => {
@ -284,13 +284,13 @@ fn convert_path(
} }
fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> { fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
let mut leafs = tt.iter().filter_map(|tt| match tt { let mut leaves = tt.iter().filter_map(|tt| match tt {
tt::TokenTree::Leaf(leaf) => Some(leaf), tt::TokenTree::Leaf(leaf) => Some(leaf),
tt::TokenTree::Subtree(_) => None, tt::TokenTree::Subtree(_) => None,
}); });
let mut segments = smallvec::smallvec![]; let mut segments = smallvec::smallvec![];
let kind = match leafs.next()? { let kind = match leaves.next()? {
tt::Leaf::Punct(tt::Punct { char: ':', .. }) => match leafs.next()? { tt::Leaf::Punct(tt::Punct { char: ':', .. }) => match leaves.next()? {
tt::Leaf::Punct(tt::Punct { char: ':', .. }) => PathKind::Abs, tt::Leaf::Punct(tt::Punct { char: ':', .. }) => PathKind::Abs,
_ => return None, _ => return None,
}, },
@ -300,7 +300,7 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModP
tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::Super(0), tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::Super(0),
tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => { tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => {
let mut deg = 1; let mut deg = 1;
while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leafs.next() { while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leaves.next() {
if text != "super" { if text != "super" {
segments.push(Name::new_text_dont_use(text.clone())); segments.push(Name::new_text_dont_use(text.clone()));
break; break;
@ -316,7 +316,7 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModP
} }
_ => return None, _ => return None,
}; };
segments.extend(leafs.filter_map(|leaf| match leaf { segments.extend(leaves.filter_map(|leaf| match leaf {
::tt::Leaf::Ident(ident) => Some(Name::new_text_dont_use(ident.text.clone())), ::tt::Leaf::Ident(ident) => Some(Name::new_text_dont_use(ident.text.clone())),
_ => None, _ => None,
})); }));
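Aside: `for segment in segments.by_ref()` is the clippy-preferred spelling of the manual `while let Some(..) = segments.next()` loop: `by_ref` lets the loop borrow the iterator so it remains usable afterwards. Self-contained illustration:

    fn main() {
        let mut segments = ["super", "super", "name", "rest"].into_iter();
        let mut deg = 0;
        for segment in segments.by_ref() {
            if segment != "super" {
                break; // first non-`super` segment ends the prefix
            }
            deg += 1;
        }
        // The iterator was only borrowed, so iteration can continue.
        assert_eq!(deg, 2);
        assert_eq!(segments.next(), Some("rest"));
    }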


@ -1,4 +1,4 @@
//! Proc Macro Expander stub //! Proc Macro Expander stuff
use core::fmt; use core::fmt;
use std::{panic::RefUnwindSafe, sync}; use std::{panic::RefUnwindSafe, sync};


@ -1,5 +1,5 @@
//! Span maps for real files and macro expansions. //! Span maps for real files and macro expansions.
use span::{FileId, HirFileId, HirFileIdRepr, Span}; use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span};
use syntax::{AstNode, TextRange}; use syntax::{AstNode, TextRange};
use triomphe::Arc; use triomphe::Arc;
@ -94,3 +94,10 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<Rea
tree.syntax().text_range().end(), tree.syntax().text_range().end(),
)) ))
} }
pub(crate) fn expansion_span_map(
db: &dyn ExpandDatabase,
file_id: MacroFileId,
) -> Arc<ExpansionSpanMap> {
db.parse_macro_expansion(file_id).value.1
}


@ -142,7 +142,7 @@ pub(crate) fn deref_by_trait(
table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>, table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>,
ty: Ty, ty: Ty,
) -> Option<Ty> { ) -> Option<Ty> {
let _p = profile::span("deref_by_trait"); let _p = tracing::span!(tracing::Level::INFO, "deref_by_trait").entered();
if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() { if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() {
// don't try to deref unknown variables // don't try to deref unknown variables
return None; return None;


@ -689,7 +689,7 @@ pub(crate) fn impl_datum_query(
krate: CrateId, krate: CrateId,
impl_id: ImplId, impl_id: ImplId,
) -> Arc<ImplDatum> { ) -> Arc<ImplDatum> {
let _p = profile::span("impl_datum"); let _p = tracing::span!(tracing::Level::INFO, "impl_datum").entered();
debug!("impl_datum {:?}", impl_id); debug!("impl_datum {:?}", impl_id);
let impl_: hir_def::ImplId = from_chalk(db, impl_id); let impl_: hir_def::ImplId = from_chalk(db, impl_id);
impl_def_datum(db, krate, impl_id, impl_) impl_def_datum(db, krate, impl_id, impl_)


@ -118,7 +118,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>; fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)] #[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>; fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
fn lookup_impl_method( fn lookup_impl_method(
@ -281,7 +281,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
} }
fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = profile::span("infer:wait").detail(|| match def { let detail = match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(), DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(),
DefWithBodyId::StaticId(it) => { DefWithBodyId::StaticId(it) => {
db.static_data(it).name.clone().display(db.upcast()).to_string() db.static_data(it).name.clone().display(db.upcast()).to_string()
@ -297,7 +297,8 @@ fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult>
db.enum_variant_data(it).name.display(db.upcast()).to_string() db.enum_variant_data(it).name.display(db.upcast()).to_string()
} }
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"), DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
}); };
let _p = tracing::span!(tracing::Level::INFO, "infer:wait", ?detail).entered();
db.infer_query(def) db.infer_query(def)
} }
@ -307,7 +308,7 @@ fn trait_solve_wait(
block: Option<BlockId>, block: Option<BlockId>,
goal: crate::Canonical<crate::InEnvironment<crate::Goal>>, goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
) -> Option<crate::Solution> { ) -> Option<crate::Solution> {
let _p = profile::span("trait_solve::wait"); let _p = tracing::span!(tracing::Level::INFO, "trait_solve::wait").entered();
db.trait_solve_query(krate, block, goal) db.trait_solve_query(krate, block, goal)
} }
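Aside: where `profile::span(..).detail(|| ..)` took a lazy closure, the tracing version records the detail as a span field; the `?detail` shorthand captures it via its Debug impl. A hedged sketch, again assuming `tracing`/`tracing-subscriber`:

    use tracing::Level;

    fn infer_wait_like(def_name: &str) {
        let detail = format!("fn {def_name}");
        // `?detail` attaches the string as a Debug-formatted span field.
        let _p = tracing::span!(Level::INFO, "infer:wait", ?detail).entered();
        // ... db.infer_query(def) would run here ...
    }

    fn main() {
        tracing_subscriber::fmt().with_max_level(Level::INFO).init();
        infer_wait_like("example");
    }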


@ -45,7 +45,7 @@ mod allow {
} }
pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> { pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
let _p = profile::span("validate_module_item"); let _p = tracing::span!(tracing::Level::INFO, "validate_module_item").entered();
let mut validator = DeclValidator::new(db); let mut validator = DeclValidator::new(db);
validator.validate_item(owner); validator.validate_item(owner);
validator.sink validator.sink


@ -48,7 +48,8 @@ pub enum BodyValidationDiagnostic {
impl BodyValidationDiagnostic { impl BodyValidationDiagnostic {
pub fn collect(db: &dyn HirDatabase, owner: DefWithBodyId) -> Vec<BodyValidationDiagnostic> { pub fn collect(db: &dyn HirDatabase, owner: DefWithBodyId) -> Vec<BodyValidationDiagnostic> {
let _p = profile::span("BodyValidationDiagnostic::collect"); let _p =
tracing::span!(tracing::Level::INFO, "BodyValidationDiagnostic::collect").entered();
let infer = db.infer(owner); let infer = db.infer(owner);
let mut validator = ExprValidator::new(owner, infer); let mut validator = ExprValidator::new(owner, infer);
validator.validate_body(db); validator.validate_body(db);


@ -237,7 +237,7 @@ impl<'p> MatchCheckCtx<'p> {
ctor = Or; ctor = Or;
// Collect here because `Arena::alloc_extend` panics on reentrancy. // Collect here because `Arena::alloc_extend` panics on reentrancy.
let subpats: SmallVec<[_; 2]> = let subpats: SmallVec<[_; 2]> =
pats.into_iter().map(|pat| self.lower_pat(pat)).collect(); pats.iter().map(|pat| self.lower_pat(pat)).collect();
fields = self.pattern_arena.alloc_extend(subpats); fields = self.pattern_arena.alloc_extend(subpats);
} }
} }
@ -460,7 +460,8 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
_f: &mut fmt::Formatter<'_>, _f: &mut fmt::Formatter<'_>,
_pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>, _pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>,
) -> fmt::Result { ) -> fmt::Result {
unimplemented!() // FIXME: implement this, as using `unimplemented!()` causes panics in `tracing`.
Ok(())
} }
fn bug(&self, fmt: fmt::Arguments<'_>) -> ! { fn bug(&self, fmt: fmt::Arguments<'_>) -> ! {


@ -605,8 +605,11 @@ fn render_const_scalar(
write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast())) write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast()))
} }
hir_def::AdtId::EnumId(e) => { hir_def::AdtId::EnumId(e) => {
let Ok(target_data_layout) = f.db.target_data_layout(trait_env.krate) else {
return f.write_str("<target-layout-not-available>");
};
let Some((var_id, var_layout)) = let Some((var_id, var_layout)) =
detect_variant_from_bytes(&layout, f.db, trait_env, b, e) detect_variant_from_bytes(&layout, f.db, &target_data_layout, b, e)
else { else {
return f.write_str("<failed-to-detect-variant>"); return f.write_str("<failed-to-detect-variant>");
}; };


@ -75,7 +75,7 @@ pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference. /// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = profile::span("infer_query"); let _p = tracing::span!(tracing::Level::INFO, "infer_query").entered();
let resolver = def.resolver(db.upcast()); let resolver = def.resolver(db.upcast());
let body = db.body(def); let body = db.body(def);
let mut ctx = InferenceContext::new(db, def, &body, resolver); let mut ctx = InferenceContext::new(db, def, &body, resolver);


@ -509,7 +509,8 @@ impl<'a> InferenceTable<'a> {
} }
pub(crate) fn resolve_obligations_as_possible(&mut self) { pub(crate) fn resolve_obligations_as_possible(&mut self) {
let _span = profile::span("resolve_obligations_as_possible"); let _span =
tracing::span!(tracing::Level::INFO, "resolve_obligations_as_possible").entered();
let mut changed = true; let mut changed = true;
let mut obligations = mem::take(&mut self.resolve_obligations_buffer); let mut obligations = mem::take(&mut self.resolve_obligations_buffer);
while mem::take(&mut changed) { while mem::take(&mut changed) {


@ -84,8 +84,7 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
Some(0) | None => CONTINUE_OPAQUELY_INHABITED, Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
Some(1..) => item_ty.super_visit_with(self, outer_binder), Some(1..) => item_ty.super_visit_with(self, outer_binder),
}, },
_ => CONTINUE_OPAQUELY_INHABITED,
TyKind::Ref(..) | _ => CONTINUE_OPAQUELY_INHABITED,
}; };
self.recursive_ty.remove(ty); self.recursive_ty.remove(ty);
self.max_depth += 1; self.max_depth += 1;


@ -198,7 +198,7 @@ pub fn layout_of_ty_query(
trait_env: Arc<TraitEnvironment>, trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> { ) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate; let krate = trait_env.krate;
let Some(target) = db.target_data_layout(krate) else { let Ok(target) = db.target_data_layout(krate) else {
return Err(LayoutError::TargetLayoutNotAvailable); return Err(LayoutError::TargetLayoutNotAvailable);
}; };
let cx = LayoutCx { target: &target }; let cx = LayoutCx { target: &target };


@ -32,7 +32,7 @@ pub fn layout_of_adt_query(
trait_env: Arc<TraitEnvironment>, trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> { ) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate; let krate = trait_env.krate;
let Some(target) = db.target_data_layout(krate) else { let Ok(target) = db.target_data_layout(krate) else {
return Err(LayoutError::TargetLayoutNotAvailable); return Err(LayoutError::TargetLayoutNotAvailable);
}; };
let cx = LayoutCx { target: &target }; let cx = LayoutCx { target: &target };


@ -2,6 +2,7 @@
use base_db::CrateId; use base_db::CrateId;
use hir_def::layout::TargetDataLayout; use hir_def::layout::TargetDataLayout;
use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors};
use triomphe::Arc; use triomphe::Arc;
use crate::db::HirDatabase; use crate::db::HirDatabase;
@ -9,15 +10,40 @@ use crate::db::HirDatabase;
pub fn target_data_layout_query( pub fn target_data_layout_query(
db: &dyn HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
) -> Option<Arc<TargetDataLayout>> { ) -> Result<Arc<TargetDataLayout>, Arc<str>> {
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
let target_layout = crate_graph[krate].target_layout.as_ref().ok()?; let res = crate_graph[krate].target_layout.as_deref();
let res = TargetDataLayout::parse_from_llvm_datalayout_string(target_layout); match res {
if let Err(_e) = &res { Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
// FIXME: Print the error here once it implements debug/display Ok(it) => Ok(Arc::new(it)),
// also logging here is somewhat wrong, but unfortunately this is the earliest place we can Err(e) => {
// parse that doesn't impose a dependency to the rust-abi crate for project-model Err(match e {
tracing::error!("Failed to parse target data layout for {krate:?}"); TargetDataLayoutErrors::InvalidAddressSpace { addr_space, cause, err } => {
format!(
r#"invalid address space `{addr_space}` for `{cause}` in "data-layout": {err}"#
)
}
TargetDataLayoutErrors::InvalidBits { kind, bit, cause, err } => format!(r#"invalid {kind} `{bit}` for `{cause}` in "data-layout": {err}"#),
TargetDataLayoutErrors::MissingAlignment { cause } => format!(r#"missing alignment for `{cause}` in "data-layout""#),
TargetDataLayoutErrors::InvalidAlignment { cause, err } => format!(
r#"invalid alignment for `{cause}` in "data-layout": `{align}` is {err_kind}"#,
align = err.align(),
err_kind = match err {
AlignFromBytesError::NotPowerOfTwo(_) => "not a power of two",
AlignFromBytesError::TooLarge(_) => "too large",
}
),
TargetDataLayoutErrors::InconsistentTargetArchitecture { dl, target } => {
format!(r#"inconsistent target specification: "data-layout" claims architecture is {dl}-endian, while "target-endian" is `{target}`"#)
}
TargetDataLayoutErrors::InconsistentTargetPointerWidth {
pointer_size,
target,
} => format!(r#"inconsistent target specification: "data-layout" claims pointers are {pointer_size}-bit, while "target-pointer-width" is `{target}`"#),
TargetDataLayoutErrors::InvalidBitsSize { err } => err,
}.into())
}
},
Err(e) => Err(Arc::from(&**e)),
} }
res.ok().map(Arc::new)
} }
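Aside: the query's return type changes from `Option<Arc<TargetDataLayout>>` to `Result<Arc<TargetDataLayout>, Arc<str>>`, so callers get a rendered error message instead of a bare `None`. A toy model of that error-shaping (a made-up parser stands in for `parse_from_llvm_datalayout_string`):

    use std::sync::Arc;

    // Stand-in parser; returns pointer width on success.
    fn parse(spec: &str) -> Result<u64, String> {
        spec.strip_prefix("p:")
            .and_then(|bits| bits.parse().ok())
            .ok_or_else(|| format!("invalid \"data-layout\": {spec}"))
    }

    fn target_pointer_bits(spec: Result<&str, &str>) -> Result<Arc<u64>, Arc<str>> {
        match spec {
            Ok(it) => match parse(it) {
                Ok(bits) => Ok(Arc::new(bits)),
                Err(e) => Err(e.into()), // String -> Arc<str>
            },
            // Propagate the upstream error text unchanged.
            Err(e) => Err(Arc::from(e)),
        }
    }

    fn main() {
        assert!(target_pointer_bits(Ok("p:64")).is_ok());
        assert!(target_pointer_bits(Ok("bogus")).is_err());
        assert!(target_pointer_bits(Err("no target")).is_err());
    }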


@ -363,7 +363,6 @@ has_interner!(CallableSig);
pub enum FnAbi { pub enum FnAbi {
Aapcs, Aapcs,
AapcsUnwind, AapcsUnwind,
AmdgpuKernel,
AvrInterrupt, AvrInterrupt,
AvrNonBlockingInterrupt, AvrNonBlockingInterrupt,
C, C,
@ -422,7 +421,6 @@ impl FnAbi {
match s { match s {
"aapcs-unwind" => FnAbi::AapcsUnwind, "aapcs-unwind" => FnAbi::AapcsUnwind,
"aapcs" => FnAbi::Aapcs, "aapcs" => FnAbi::Aapcs,
"amdgpu-kernel" => FnAbi::AmdgpuKernel,
"avr-interrupt" => FnAbi::AvrInterrupt, "avr-interrupt" => FnAbi::AvrInterrupt,
"avr-non-blocking-interrupt" => FnAbi::AvrNonBlockingInterrupt, "avr-non-blocking-interrupt" => FnAbi::AvrNonBlockingInterrupt,
"C-cmse-nonsecure-call" => FnAbi::CCmseNonsecureCall, "C-cmse-nonsecure-call" => FnAbi::CCmseNonsecureCall,
@ -465,7 +463,6 @@ impl FnAbi {
match self { match self {
FnAbi::Aapcs => "aapcs", FnAbi::Aapcs => "aapcs",
FnAbi::AapcsUnwind => "aapcs-unwind", FnAbi::AapcsUnwind => "aapcs-unwind",
FnAbi::AmdgpuKernel => "amdgpu-kernel",
FnAbi::AvrInterrupt => "avr-interrupt", FnAbi::AvrInterrupt => "avr-interrupt",
FnAbi::AvrNonBlockingInterrupt => "avr-non-blocking-interrupt", FnAbi::AvrNonBlockingInterrupt => "avr-non-blocking-interrupt",
FnAbi::C => "C", FnAbi::C => "C",


@ -143,7 +143,8 @@ pub struct TraitImpls {
impl TraitImpls { impl TraitImpls {
pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> { pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("trait_impls_in_crate_query").detail(|| format!("{krate:?}")); let _p =
tracing::span!(tracing::Level::INFO, "trait_impls_in_crate_query", ?krate).entered();
let mut impls = FxHashMap::default(); let mut impls = FxHashMap::default();
Self::collect_def_map(db, &mut impls, &db.crate_def_map(krate)); Self::collect_def_map(db, &mut impls, &db.crate_def_map(krate));
@ -155,7 +156,7 @@ impl TraitImpls {
db: &dyn HirDatabase, db: &dyn HirDatabase,
block: BlockId, block: BlockId,
) -> Option<Arc<Self>> { ) -> Option<Arc<Self>> {
let _p = profile::span("trait_impls_in_block_query"); let _p = tracing::span!(tracing::Level::INFO, "trait_impls_in_block_query").entered();
let mut impls = FxHashMap::default(); let mut impls = FxHashMap::default();
Self::collect_def_map(db, &mut impls, &db.block_def_map(block)); Self::collect_def_map(db, &mut impls, &db.block_def_map(block));
@ -171,7 +172,8 @@ impl TraitImpls {
db: &dyn HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
) -> Arc<[Arc<Self>]> { ) -> Arc<[Arc<Self>]> {
let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}")); let _p =
tracing::span!(tracing::Level::INFO, "trait_impls_in_deps_query", ?krate).entered();
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
Arc::from_iter( Arc::from_iter(
@ -272,7 +274,8 @@ pub struct InherentImpls {
impl InherentImpls { impl InherentImpls {
pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> { pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("inherent_impls_in_crate_query").detail(|| format!("{krate:?}")); let _p =
tracing::span!(tracing::Level::INFO, "inherent_impls_in_crate_query", ?krate).entered();
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let crate_def_map = db.crate_def_map(krate); let crate_def_map = db.crate_def_map(krate);
@ -286,7 +289,7 @@ impl InherentImpls {
db: &dyn HirDatabase, db: &dyn HirDatabase,
block: BlockId, block: BlockId,
) -> Option<Arc<Self>> { ) -> Option<Arc<Self>> {
let _p = profile::span("inherent_impls_in_block_query"); let _p = tracing::span!(tracing::Level::INFO, "inherent_impls_in_block_query").entered();
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let block_def_map = db.block_def_map(block); let block_def_map = db.block_def_map(block);
@ -359,7 +362,7 @@ pub(crate) fn incoherent_inherent_impl_crates(
krate: CrateId, krate: CrateId,
fp: TyFingerprint, fp: TyFingerprint,
) -> SmallVec<[CrateId; 2]> { ) -> SmallVec<[CrateId; 2]> {
let _p = profile::span("inherent_impl_crates_query"); let _p = tracing::span!(tracing::Level::INFO, "inherent_impl_crates_query").entered();
let mut res = SmallVec::new(); let mut res = SmallVec::new();
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();


@ -71,7 +71,7 @@ pub fn borrowck_query(
db: &dyn HirDatabase, db: &dyn HirDatabase,
def: DefWithBodyId, def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult]>, MirLowerError> { ) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
let _p = profile::span("borrowck_query"); let _p = tracing::span!(tracing::Level::INFO, "borrowck_query").entered();
let mut res = vec![]; let mut res = vec![];
all_mir_bodies(db, def, |body| { all_mir_bodies(db, def, |body| {
res.push(BorrowckResult { res.push(BorrowckResult {
@ -444,7 +444,7 @@ fn mutability_of_locals(
} }
if destination.projection.lookup(&body.projection_store).is_empty() { if destination.projection.lookup(&body.projection_store).is_empty() {
if ever_init_map.get(destination.local).copied().unwrap_or_default() { if ever_init_map.get(destination.local).copied().unwrap_or_default() {
push_mut_span(destination.local, MirSpan::Unknown, &mut result); push_mut_span(destination.local, terminator.span, &mut result);
} else { } else {
ever_init_map.insert(destination.local, true); ever_init_map.insert(destination.local, true);
} }


@ -17,6 +17,7 @@ use hir_def::{
use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile}; use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile};
use intern::Interned; use intern::Interned;
use la_arena::ArenaMap; use la_arena::ArenaMap;
use rustc_abi::TargetDataLayout;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use stdx::never; use stdx::never;
use syntax::{SyntaxNodePtr, TextRange}; use syntax::{SyntaxNodePtr, TextRange};
@ -51,7 +52,7 @@ macro_rules! from_bytes {
($ty:tt, $value:expr) => { ($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() { ($ty::from_le_bytes(match ($value).try_into() {
Ok(it) => it, Ok(it) => it,
Err(_) => return Err(MirEvalError::TypeError(stringify!(mismatched size in constructing $ty))), Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $ty).into())),
})) }))
}; };
} }
@ -145,6 +146,7 @@ enum MirOrDynIndex {
pub struct Evaluator<'a> { pub struct Evaluator<'a> {
db: &'a dyn HirDatabase, db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment>, trait_env: Arc<TraitEnvironment>,
target_data_layout: Arc<TargetDataLayout>,
stack: Vec<u8>, stack: Vec<u8>,
heap: Vec<u8>, heap: Vec<u8>,
code_stack: Vec<StackFrame>, code_stack: Vec<StackFrame>,
@ -316,12 +318,12 @@ impl Address {
pub enum MirEvalError { pub enum MirEvalError {
ConstEvalError(String, Box<ConstEvalError>), ConstEvalError(String, Box<ConstEvalError>),
LayoutError(LayoutError, Ty), LayoutError(LayoutError, Ty),
/// Means that code had type errors (or mismatched args) and we shouldn't generate mir in first place. TargetDataLayoutNotAvailable(Arc<str>),
TypeError(&'static str),
/// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected /// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected
/// then use this type of error. /// then use this type of error.
UndefinedBehavior(String), UndefinedBehavior(String),
Panic(String), Panic(String),
// FIXME: This should be folded into ConstEvalError?
MirLowerError(FunctionId, MirLowerError), MirLowerError(FunctionId, MirLowerError),
MirLowerErrorForClosure(ClosureId, MirLowerError), MirLowerErrorForClosure(ClosureId, MirLowerError),
TypeIsUnsized(Ty, &'static str), TypeIsUnsized(Ty, &'static str),
@ -330,11 +332,12 @@ pub enum MirEvalError {
InFunction(Box<MirEvalError>, Vec<(Either<FunctionId, ClosureId>, MirSpan, DefWithBodyId)>), InFunction(Box<MirEvalError>, Vec<(Either<FunctionId, ClosureId>, MirSpan, DefWithBodyId)>),
ExecutionLimitExceeded, ExecutionLimitExceeded,
StackOverflow, StackOverflow,
TargetDataLayoutNotAvailable, /// FIXME: Fold this into InternalError
InvalidVTableId(usize), InvalidVTableId(usize),
/// ?
CoerceUnsizedError(Ty), CoerceUnsizedError(Ty),
LangItemNotFound(LangItem), /// These should not occur, usually indicates a bug in mir lowering.
BrokenLayout(Box<Layout>), InternalError(Box<str>),
} }
impl MirEvalError { impl MirEvalError {
@ -359,8 +362,8 @@ impl MirEvalError {
func func
)?; )?;
} }
Either::Right(clos) => { Either::Right(closure) => {
writeln!(f, "In {:?}", clos)?; writeln!(f, "In {:?}", closure)?;
} }
} }
let source_map = db.body_with_source_map(*def).1; let source_map = db.body_with_source_map(*def).1;
@ -406,8 +409,8 @@ impl MirEvalError {
span_formatter, span_formatter,
)?; )?;
} }
MirEvalError::TypeError(_) MirEvalError::UndefinedBehavior(_)
| MirEvalError::UndefinedBehavior(_) | MirEvalError::TargetDataLayoutNotAvailable(_)
| MirEvalError::Panic(_) | MirEvalError::Panic(_)
| MirEvalError::MirLowerErrorForClosure(_, _) | MirEvalError::MirLowerErrorForClosure(_, _)
| MirEvalError::TypeIsUnsized(_, _) | MirEvalError::TypeIsUnsized(_, _)
@ -415,10 +418,8 @@ impl MirEvalError {
| MirEvalError::InvalidConst(_) | MirEvalError::InvalidConst(_)
| MirEvalError::ExecutionLimitExceeded | MirEvalError::ExecutionLimitExceeded
| MirEvalError::StackOverflow | MirEvalError::StackOverflow
| MirEvalError::TargetDataLayoutNotAvailable
| MirEvalError::CoerceUnsizedError(_) | MirEvalError::CoerceUnsizedError(_)
| MirEvalError::LangItemNotFound(_) | MirEvalError::InternalError(_)
| MirEvalError::BrokenLayout(_)
| MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?, | MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?,
} }
Ok(()) Ok(())
@ -431,16 +432,16 @@ impl std::fmt::Debug for MirEvalError {
Self::ConstEvalError(arg0, arg1) => { Self::ConstEvalError(arg0, arg1) => {
f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish() f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish()
} }
Self::LangItemNotFound(arg0) => f.debug_tuple("LangItemNotFound").field(arg0).finish(),
Self::LayoutError(arg0, arg1) => { Self::LayoutError(arg0, arg1) => {
f.debug_tuple("LayoutError").field(arg0).field(arg1).finish() f.debug_tuple("LayoutError").field(arg0).field(arg1).finish()
} }
Self::TypeError(arg0) => f.debug_tuple("TypeError").field(arg0).finish(),
Self::UndefinedBehavior(arg0) => { Self::UndefinedBehavior(arg0) => {
f.debug_tuple("UndefinedBehavior").field(arg0).finish() f.debug_tuple("UndefinedBehavior").field(arg0).finish()
} }
Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"), Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"),
Self::TargetDataLayoutNotAvailable => write!(f, "TargetDataLayoutNotAvailable"), Self::TargetDataLayoutNotAvailable(arg0) => {
f.debug_tuple("TargetDataLayoutNotAvailable").field(arg0).finish()
}
Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."), Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."),
Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"), Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),
Self::StackOverflow => write!(f, "stack overflow"), Self::StackOverflow => write!(f, "stack overflow"),
@ -453,7 +454,7 @@ impl std::fmt::Debug for MirEvalError {
Self::CoerceUnsizedError(arg0) => { Self::CoerceUnsizedError(arg0) => {
f.debug_tuple("CoerceUnsizedError").field(arg0).finish() f.debug_tuple("CoerceUnsizedError").field(arg0).finish()
} }
Self::BrokenLayout(arg0) => f.debug_tuple("BrokenLayout").field(arg0).finish(), Self::InternalError(arg0) => f.debug_tuple("InternalError").field(arg0).finish(),
Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(), Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(),
Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(), Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
Self::InvalidConst(arg0) => { Self::InvalidConst(arg0) => {
@ -530,7 +531,11 @@ pub fn interpret_mir(
trait_env: Option<Arc<TraitEnvironment>>, trait_env: Option<Arc<TraitEnvironment>>,
) -> (Result<Const>, MirOutput) { ) -> (Result<Const>, MirOutput) {
let ty = body.locals[return_slot()].ty.clone(); let ty = body.locals[return_slot()].ty.clone();
let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env); let mut evaluator =
match Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env) {
Ok(it) => it,
Err(e) => return (Err(e), MirOutput { stdout: vec![], stderr: vec![] }),
};
let it: Result<Const> = (|| { let it: Result<Const> = (|| {
if evaluator.ptr_size() != std::mem::size_of::<usize>() { if evaluator.ptr_size() != std::mem::size_of::<usize>() {
not_supported!("targets with different pointer size from host"); not_supported!("targets with different pointer size from host");
@ -566,9 +571,15 @@ impl Evaluator<'_> {
owner: DefWithBodyId, owner: DefWithBodyId,
assert_placeholder_ty_is_unused: bool, assert_placeholder_ty_is_unused: bool,
trait_env: Option<Arc<TraitEnvironment>>, trait_env: Option<Arc<TraitEnvironment>>,
) -> Evaluator<'_> { ) -> Result<Evaluator<'_>> {
let crate_id = owner.module(db.upcast()).krate(); let crate_id = owner.module(db.upcast()).krate();
Evaluator { let target_data_layout = match db.target_data_layout(crate_id) {
Ok(target_data_layout) => target_data_layout,
Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
};
let cached_ptr_size = target_data_layout.pointer_size.bytes_usize();
Ok(Evaluator {
target_data_layout,
stack: vec![0], stack: vec![0],
heap: vec![0], heap: vec![0],
code_stack: vec![], code_stack: vec![],
@ -590,10 +601,7 @@ impl Evaluator<'_> {
not_special_fn_cache: RefCell::new(Default::default()), not_special_fn_cache: RefCell::new(Default::default()),
mir_or_dyn_index_cache: RefCell::new(Default::default()), mir_or_dyn_index_cache: RefCell::new(Default::default()),
unused_locals_store: RefCell::new(Default::default()), unused_locals_store: RefCell::new(Default::default()),
cached_ptr_size: match db.target_data_layout(crate_id) { cached_ptr_size,
Some(it) => it.pointer_size.bytes_usize(),
None => 8,
},
cached_fn_trait_func: db cached_fn_trait_func: db
.lang_item(crate_id, LangItem::Fn) .lang_item(crate_id, LangItem::Fn)
.and_then(|x| x.as_trait()) .and_then(|x| x.as_trait())
@ -606,7 +614,7 @@ impl Evaluator<'_> {
.lang_item(crate_id, LangItem::FnOnce) .lang_item(crate_id, LangItem::FnOnce)
.and_then(|x| x.as_trait()) .and_then(|x| x.as_trait())
.and_then(|x| db.trait_data(x).method_by_name(&name![call_once])), .and_then(|x| db.trait_data(x).method_by_name(&name![call_once])),
} })
} }
fn place_addr(&self, p: &Place, locals: &Locals) -> Result<Address> { fn place_addr(&self, p: &Place, locals: &Locals) -> Result<Address> {
@ -754,8 +762,8 @@ impl Evaluator<'_> {
RustcEnumVariantIdx(it.lookup(self.db.upcast()).index as usize) RustcEnumVariantIdx(it.lookup(self.db.upcast()).index as usize)
} }
_ => { _ => {
return Err(MirEvalError::TypeError( return Err(MirEvalError::InternalError(
"Multivariant layout only happens for enums", "mismatched layout".into(),
)) ))
} }
}] }]
@ -993,12 +1001,12 @@ impl Evaluator<'_> {
IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?, IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?,
} }
if remain_args == 0 { if remain_args == 0 {
return Err(MirEvalError::TypeError("more arguments provided")); return Err(MirEvalError::InternalError("too many arguments".into()));
} }
remain_args -= 1; remain_args -= 1;
} }
if remain_args > 0 { if remain_args > 0 {
return Err(MirEvalError::TypeError("not enough arguments provided")); return Err(MirEvalError::InternalError("too few arguments".into()));
} }
Ok(()) Ok(())
} }
@ -1071,8 +1079,8 @@ impl Evaluator<'_> {
match metadata { match metadata {
Some(m) => m, Some(m) => m,
None => { None => {
return Err(MirEvalError::TypeError( return Err(MirEvalError::InternalError(
"type without metadata is used for Rvalue::Len", "type without metadata is used for Rvalue::Len".into(),
)); ));
} }
} }
@ -1312,7 +1320,7 @@ impl Evaluator<'_> {
} }
AggregateKind::Tuple(ty) => { AggregateKind::Tuple(ty) => {
let layout = self.layout(ty)?; let layout = self.layout(ty)?;
Owned(self.make_by_layout( Owned(self.construct_with_layout(
layout.size.bytes_usize(), layout.size.bytes_usize(),
&layout, &layout,
None, None,
@ -1334,7 +1342,7 @@ impl Evaluator<'_> {
AggregateKind::Adt(it, subst) => { AggregateKind::Adt(it, subst) => {
let (size, variant_layout, tag) = let (size, variant_layout, tag) =
self.layout_of_variant(*it, subst.clone(), locals)?; self.layout_of_variant(*it, subst.clone(), locals)?;
Owned(self.make_by_layout( Owned(self.construct_with_layout(
size, size,
&variant_layout, &variant_layout,
tag, tag,
@ -1343,7 +1351,7 @@ impl Evaluator<'_> {
} }
AggregateKind::Closure(ty) => { AggregateKind::Closure(ty) => {
let layout = self.layout(ty)?; let layout = self.layout(ty)?;
Owned(self.make_by_layout( Owned(self.construct_with_layout(
layout.size.bytes_usize(), layout.size.bytes_usize(),
&layout, &layout,
None, None,
@ -1415,10 +1423,7 @@ impl Evaluator<'_> {
Ok(r) Ok(r)
} }
Variants::Multiple { tag, tag_encoding, variants, .. } => { Variants::Multiple { tag, tag_encoding, variants, .. } => {
let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else { let size = tag.size(&*self.target_data_layout).bytes_usize();
not_supported!("missing target data layout");
};
let size = tag.size(&*target_data_layout).bytes_usize();
let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
match tag_encoding { match tag_encoding {
TagEncoding::Direct => { TagEncoding::Direct => {
@ -1458,9 +1463,8 @@ impl Evaluator<'_> {
if let TyKind::Adt(id, subst) = kind { if let TyKind::Adt(id, subst) = kind {
if let AdtId::StructId(struct_id) = id.0 { if let AdtId::StructId(struct_id) = id.0 {
let field_types = self.db.field_types(struct_id.into()); let field_types = self.db.field_types(struct_id.into());
let mut field_types = field_types.iter();
if let Some(ty) = if let Some(ty) =
field_types.next().map(|it| it.1.clone().substitute(Interner, subst)) field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst))
{ {
return self.coerce_unsized_look_through_fields(&ty, goal); return self.coerce_unsized_look_through_fields(&ty, goal);
} }
@ -1578,10 +1582,6 @@ impl Evaluator<'_> {
Ok(match &layout.variants { Ok(match &layout.variants {
Variants::Single { .. } => (layout.size.bytes_usize(), layout, None), Variants::Single { .. } => (layout.size.bytes_usize(), layout, None),
Variants::Multiple { variants, tag, tag_encoding, .. } => { Variants::Multiple { variants, tag, tag_encoding, .. } => {
let cx = self
.db
.target_data_layout(self.crate_id)
.ok_or(MirEvalError::TargetDataLayoutNotAvailable)?;
let enum_variant_id = match it { let enum_variant_id = match it {
VariantId::EnumVariantId(it) => it, VariantId::EnumVariantId(it) => it,
_ => not_supported!("multi variant layout for non-enums"), _ => not_supported!("multi variant layout for non-enums"),
@ -1612,7 +1612,7 @@ impl Evaluator<'_> {
if have_tag { if have_tag {
Some(( Some((
layout.fields.offset(0).bytes_usize(), layout.fields.offset(0).bytes_usize(),
tag.size(&*cx).bytes_usize(), tag.size(&*self.target_data_layout).bytes_usize(),
discriminant, discriminant,
)) ))
} else { } else {
@ -1623,7 +1623,7 @@ impl Evaluator<'_> {
}) })
} }
fn make_by_layout( fn construct_with_layout(
&mut self, &mut self,
size: usize, // Not necessarily equal to variant_layout.size size: usize, // Not necessarily equal to variant_layout.size
variant_layout: &Layout, variant_layout: &Layout,
@ -1634,7 +1634,14 @@ impl Evaluator<'_> {
if let Some((offset, size, value)) = tag { if let Some((offset, size, value)) = tag {
match result.get_mut(offset..offset + size) { match result.get_mut(offset..offset + size) {
Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]), Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]),
None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))), None => {
return Err(MirEvalError::InternalError(
format!(
"encoded tag ({offset}, {size}, {value}) is out of bounds 0..{size}"
)
.into(),
))
}
} }
} }
for (i, op) in values.enumerate() { for (i, op) in values.enumerate() {
@ -1642,7 +1649,11 @@ impl Evaluator<'_> {
let op = op.get(self)?; let op = op.get(self)?;
match result.get_mut(offset..offset + op.len()) { match result.get_mut(offset..offset + op.len()) {
Some(it) => it.copy_from_slice(op), Some(it) => it.copy_from_slice(op),
None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))), None => {
return Err(MirEvalError::InternalError(
format!("field offset ({offset}) is out of bounds 0..{size}").into(),
))
}
} }
} }
Ok(result) Ok(result)
@ -1695,28 +1706,29 @@ impl Evaluator<'_> {
} }
ConstScalar::Unknown => not_supported!("evaluating unknown const"), ConstScalar::Unknown => not_supported!("evaluating unknown const"),
}; };
let mut v: Cow<'_, [u8]> = Cow::Borrowed(v);
let patch_map = memory_map.transform_addresses(|b, align| { let patch_map = memory_map.transform_addresses(|b, align| {
let addr = self.heap_allocate(b.len(), align)?; let addr = self.heap_allocate(b.len(), align)?;
self.write_memory(addr, b)?; self.write_memory(addr, b)?;
Ok(addr.to_usize()) Ok(addr.to_usize())
})?; })?;
let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1)); let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1));
if size != v.len() { let v: Cow<'_, [u8]> = if size != v.len() {
// Handle self enum // Handle self enum
if size == 16 && v.len() < 16 { if size == 16 && v.len() < 16 {
v = Cow::Owned(pad16(&v, false).to_vec()); Cow::Owned(pad16(v, false).to_vec())
} else if size < 16 && v.len() == 16 { } else if size < 16 && v.len() == 16 {
-v = Cow::Owned(v[0..size].to_vec());
+Cow::Borrowed(&v[0..size])
} else {
return Err(MirEvalError::InvalidConst(konst.clone()));
}
-}
+} else {
+Cow::Borrowed(v)
+};
let addr = self.heap_allocate(size, align)?;
self.write_memory(addr, &v)?;
self.patch_addresses(
&patch_map,
-|bytes| match &memory_map {
+|bytes| match memory_map {
MemoryMap::Empty | MemoryMap::Simple(_) => {
Err(MirEvalError::InvalidVTableId(from_bytes!(usize, bytes)))
}
@@ -2000,7 +2012,7 @@ impl Evaluator<'_> {
if let Some((v, l)) = detect_variant_from_bytes(
&layout,
this.db,
-this.trait_env.clone(),
+&this.target_data_layout,
bytes,
e,
) {
@@ -2079,7 +2091,7 @@ impl Evaluator<'_> {
if let Some((ev, layout)) = detect_variant_from_bytes(
&layout,
self.db,
-self.trait_env.clone(),
+&self.target_data_layout,
self.read_memory(addr, layout.size.bytes_usize())?,
e,
) {
@@ -2153,14 +2165,14 @@ impl Evaluator<'_> {
) -> Result<Option<StackFrame>> {
let id = from_bytes!(usize, bytes.get(self)?);
let next_ty = self.vtable_map.ty(id)?.clone();
-match &next_ty.kind(Interner) {
+match next_ty.kind(Interner) {
TyKind::FnDef(def, generic_args) => {
self.exec_fn_def(*def, generic_args, destination, args, locals, target_bb, span)
}
TyKind::Closure(id, subst) => {
self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)
}
-_ => Err(MirEvalError::TypeError("function pointer to non function")),
+_ => Err(MirEvalError::InternalError("function pointer to non function".into())),
}
}
@@ -2241,7 +2253,7 @@ impl Evaluator<'_> {
CallableDefId::StructId(id) => {
let (size, variant_layout, tag) =
self.layout_of_variant(id.into(), generic_args, locals)?;
-let result = self.make_by_layout(
+let result = self.construct_with_layout(
size,
&variant_layout,
tag,
@@ -2253,7 +2265,7 @@ impl Evaluator<'_> {
CallableDefId::EnumVariantId(id) => {
let (size, variant_layout, tag) =
self.layout_of_variant(id.into(), generic_args, locals)?;
-let result = self.make_by_layout(
+let result = self.construct_with_layout(
size,
&variant_layout,
tag,
@@ -2407,7 +2419,9 @@ impl Evaluator<'_> {
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<Option<StackFrame>> {
-let func = args.first().ok_or(MirEvalError::TypeError("fn trait with no arg"))?;
+let func = args
+.first()
+.ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?;
let mut func_ty = func.ty.clone();
let mut func_data = func.interval;
while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) {
@@ -2450,7 +2464,7 @@ impl Evaluator<'_> {
)
.intern(Interner);
let layout = self.layout(&ty)?;
-let result = self.make_by_layout(
+let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
None,
@@ -2634,7 +2648,7 @@ pub fn render_const_using_debug_impl(
owner: ConstId,
c: &Const,
) -> Result<String> {
-let mut evaluator = Evaluator::new(db, owner.into(), false, None);
+let mut evaluator = Evaluator::new(db, owner.into(), false, None)?;
let locals = &Locals {
ptr: ArenaMap::new(),
body: db
@@ -2699,12 +2713,7 @@ pub fn render_const_using_debug_impl(
pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] {
let is_negative = is_signed && it.last().unwrap_or(&0) > &127;
-let fill_with = if is_negative { 255 } else { 0 };
-it.iter()
-.copied()
-.chain(iter::repeat(fill_with))
-.take(16)
-.collect::<Vec<u8>>()
-.try_into()
-.expect("iterator take is not working")
+let mut res = [if is_negative { 255 } else { 0 }; 16];
+res[..it.len()].copy_from_slice(it);
+res
}
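
The rewritten `pad16` above drops the iterator pipeline for a fixed buffer: it fills 16 bytes with the sign-extension byte, then copies the input over the low bytes. A self-contained sketch with an illustrative check (the function body mirrors the diff; the test values are ours):

/// Little-endian extension of `it` to 16 bytes, sign-extending when asked.
pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] {
    // A high bit in the most significant byte means negative, so pad with 0xFF.
    let is_negative = is_signed && it.last().unwrap_or(&0) > &127;
    let mut res = [if is_negative { 255 } else { 0 }; 16];
    res[..it.len()].copy_from_slice(it);
    res
}

fn main() {
    // -1i8 sign-extends to -1i128; the same byte zero-extends to 255.
    assert_eq!(i128::from_le_bytes(pad16(&(-1i8).to_le_bytes(), true)), -1);
    assert_eq!(u128::from_le_bytes(pad16(&[0xFF], false)), 255);
}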

View file

@@ -18,7 +18,7 @@ macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
Ok(it) => it,
-Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
+Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())),
}))
};
}
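
The `from_bytes!` macro above is the evaluator's byte-decoding workhorse: it converts a little-endian byte slice into a fixed-width integer, and reports an internal error when the slice length does not match the target type. A standalone sketch of the expansion for `usize` (a plain `String` error stands in for `MirEvalError::InternalError` here):

// Roughly what `from_bytes!(usize, bytes)` expands to, outside the macro.
fn usize_from_bytes(bytes: &[u8]) -> Result<usize, String> {
    Ok(usize::from_le_bytes(match bytes.try_into() {
        Ok(it) => it,
        // try_into fails when bytes.len() != size_of::<usize>().
        Err(_) => return Err("mismatched size".into()),
    }))
}

fn main() {
    assert_eq!(usize_from_bytes(&42usize.to_le_bytes()), Ok(42));
    assert!(usize_from_bytes(&[1, 2, 3]).is_err());
}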
@@ -249,7 +249,9 @@ impl Evaluator<'_> {
match alloc_fn {
"rustc_allocator_zeroed" | "rustc_allocator" => {
let [size, align] = args else {
-return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
+return Err(MirEvalError::InternalError(
+"rustc_allocator args are not provided".into(),
+));
};
let size = from_bytes!(usize, size.get(self)?);
let align = from_bytes!(usize, align.get(self)?);
@@ -259,7 +261,9 @@ impl Evaluator<'_> {
"rustc_deallocator" => { /* no-op for now */ }
"rustc_reallocator" => {
let [ptr, old_size, align, new_size] = args else {
-return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
+return Err(MirEvalError::InternalError(
+"rustc_allocator args are not provided".into(),
+));
};
let old_size = from_bytes!(usize, old_size.get(self)?);
let new_size = from_bytes!(usize, new_size.get(self)?);
@@ -339,22 +343,22 @@ impl Evaluator<'_> {
Err(MirEvalError::Panic(message))
}
SliceLen => {
-let arg = args
-.next()
-.ok_or(MirEvalError::TypeError("argument of <[T]>::len() is not provided"))?;
+let arg = args.next().ok_or(MirEvalError::InternalError(
+"argument of <[T]>::len() is not provided".into(),
+))?;
let ptr_size = arg.len() / 2;
Ok(arg[ptr_size..].into())
}
DropInPlace => {
let ty =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)).ok_or(
-MirEvalError::TypeError(
-"generic argument of drop_in_place is not provided",
+MirEvalError::InternalError(
+"generic argument of drop_in_place is not provided".into(),
),
)?;
-let arg = args
-.next()
-.ok_or(MirEvalError::TypeError("argument of drop_in_place is not provided"))?;
+let arg = args.next().ok_or(MirEvalError::InternalError(
+"argument of drop_in_place is not provided".into(),
+))?;
self.run_drop_glue_deep(
ty.clone(),
locals,
@@ -380,7 +384,9 @@ impl Evaluator<'_> {
318 => {
// SYS_getrandom
let [buf, len, _flags] = args else {
-return Err(MirEvalError::TypeError("SYS_getrandom args are not provided"));
+return Err(MirEvalError::InternalError(
+"SYS_getrandom args are not provided".into(),
+));
};
let addr = Address::from_bytes(buf.get(self)?)?;
let size = from_bytes!(usize, len.get(self)?);
@@ -408,7 +414,7 @@ impl Evaluator<'_> {
match as_str {
"memcmp" => {
let [ptr1, ptr2, size] = args else {
-return Err(MirEvalError::TypeError("memcmp args are not provided"));
+return Err(MirEvalError::InternalError("memcmp args are not provided".into()));
};
let addr1 = Address::from_bytes(ptr1.get(self)?)?;
let addr2 = Address::from_bytes(ptr2.get(self)?)?;
@@ -424,7 +430,9 @@ impl Evaluator<'_> {
}
"write" => {
let [fd, ptr, len] = args else {
-return Err(MirEvalError::TypeError("libc::write args are not provided"));
+return Err(MirEvalError::InternalError(
+"libc::write args are not provided".into(),
+));
};
let fd = u128::from_le_bytes(pad16(fd.get(self)?, false));
let interval = Interval {
@@ -446,14 +454,16 @@ impl Evaluator<'_> {
"pthread_key_create" => {
let key = self.thread_local_storage.create_key();
let Some(arg0) = args.first() else {
-return Err(MirEvalError::TypeError("pthread_key_create arg0 is not provided"));
+return Err(MirEvalError::InternalError(
+"pthread_key_create arg0 is not provided".into(),
+));
};
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
let key_ty = if let Some((ty, ..)) = arg0.ty.as_reference_or_ptr() {
ty
} else {
-return Err(MirEvalError::TypeError(
-"pthread_key_create arg0 is not a pointer",
+return Err(MirEvalError::InternalError(
+"pthread_key_create arg0 is not a pointer".into(),
));
};
let arg0_interval = Interval::new(
@@ -467,8 +477,8 @@ impl Evaluator<'_> {
}
"pthread_getspecific" => {
let Some(arg0) = args.first() else {
-return Err(MirEvalError::TypeError(
-"pthread_getspecific arg0 is not provided",
+return Err(MirEvalError::InternalError(
+"pthread_getspecific arg0 is not provided".into(),
));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
@@ -478,14 +488,14 @@ impl Evaluator<'_> {
}
"pthread_setspecific" => {
let Some(arg0) = args.first() else {
-return Err(MirEvalError::TypeError(
-"pthread_setspecific arg0 is not provided",
+return Err(MirEvalError::InternalError(
+"pthread_setspecific arg0 is not provided".into(),
));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
let Some(arg1) = args.get(1) else {
-return Err(MirEvalError::TypeError(
-"pthread_setspecific arg1 is not provided",
+return Err(MirEvalError::InternalError(
+"pthread_setspecific arg1 is not provided".into(),
));
};
let value = from_bytes!(u128, pad16(arg1.get(self)?, false));
@@ -502,14 +512,16 @@ impl Evaluator<'_> {
}
"syscall" => {
let Some((id, rest)) = args.split_first() else {
-return Err(MirEvalError::TypeError("syscall arg1 is not provided"));
+return Err(MirEvalError::InternalError("syscall arg1 is not provided".into()));
};
let id = from_bytes!(i64, id.get(self)?);
self.exec_syscall(id, rest, destination, locals, span)
}
"sched_getaffinity" => {
let [_pid, _set_size, set] = args else {
-return Err(MirEvalError::TypeError("libc::write args are not provided"));
+return Err(MirEvalError::InternalError(
+"libc::write args are not provided".into(),
+));
};
let set = Address::from_bytes(set.get(self)?)?;
// Only enable core 0 (we are single threaded anyway), which is bitset 0x0000001
@@ -520,7 +532,9 @@ impl Evaluator<'_> {
}
"getenv" => {
let [name] = args else {
-return Err(MirEvalError::TypeError("libc::write args are not provided"));
+return Err(MirEvalError::InternalError(
+"libc::write args are not provided".into(),
+));
};
let mut name_buf = vec![];
let name = {
@@ -586,8 +600,8 @@ impl Evaluator<'_> {
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
let [arg] = args else {
-return Err(MirEvalError::TypeError(
-"f64 intrinsic signature doesn't match fn (f64) -> f64",
+return Err(MirEvalError::InternalError(
+"f64 intrinsic signature doesn't match fn (f64) -> f64".into(),
));
};
let arg = from_bytes!(f64, arg.get(self)?);
@@ -614,8 +628,8 @@ impl Evaluator<'_> {
}
"pow" | "minnum" | "maxnum" | "copysign" => {
let [arg1, arg2] = args else {
-return Err(MirEvalError::TypeError(
-"f64 intrinsic signature doesn't match fn (f64, f64) -> f64",
+return Err(MirEvalError::InternalError(
+"f64 intrinsic signature doesn't match fn (f64, f64) -> f64".into(),
));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
@@ -630,8 +644,8 @@ impl Evaluator<'_> {
}
"powi" => {
let [arg1, arg2] = args else {
-return Err(MirEvalError::TypeError(
-"powif64 signature doesn't match fn (f64, i32) -> f64",
+return Err(MirEvalError::InternalError(
+"powif64 signature doesn't match fn (f64, i32) -> f64".into(),
));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
@@ -640,8 +654,8 @@ impl Evaluator<'_> {
}
"fma" => {
let [arg1, arg2, arg3] = args else {
-return Err(MirEvalError::TypeError(
-"fmaf64 signature doesn't match fn (f64, f64, f64) -> f64",
+return Err(MirEvalError::InternalError(
+"fmaf64 signature doesn't match fn (f64, f64, f64) -> f64".into(),
));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
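
The "fma" arm above decodes three `f64` operands; fused multiply-add means a * b + c with a single rounding step. A sketch of the difference, using std's `f64::mul_add` (the shim's computation after argument decoding is not shown in the diff, so this is illustration only):

// Fused vs. unfused multiply-add: the fused form rounds once, the unfused
// form rounds after the multiply and again after the add.
fn main() {
    let (a, b, c) = (0.1f64, 10.0, -1.0);
    let fused = a.mul_add(b, c); // one rounding
    let unfused = a * b + c;     // two roundings
    // The results can differ in the last bits; fused is the more precise one.
    println!("fused = {fused:e}, unfused = {unfused:e}");
}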
@@ -658,8 +672,8 @@ impl Evaluator<'_> {
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
let [arg] = args else {
-return Err(MirEvalError::TypeError(
-"f32 intrinsic signature doesn't match fn (f32) -> f32",
+return Err(MirEvalError::InternalError(
+"f32 intrinsic signature doesn't match fn (f32) -> f32".into(),
));
};
let arg = from_bytes!(f32, arg.get(self)?);
@@ -686,8 +700,8 @@ impl Evaluator<'_> {
}
"pow" | "minnum" | "maxnum" | "copysign" => {
let [arg1, arg2] = args else {
-return Err(MirEvalError::TypeError(
-"f32 intrinsic signature doesn't match fn (f32, f32) -> f32",
+return Err(MirEvalError::InternalError(
+"f32 intrinsic signature doesn't match fn (f32, f32) -> f32".into(),
));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
@@ -702,8 +716,8 @@ impl Evaluator<'_> {
}
"powi" => {
let [arg1, arg2] = args else {
-return Err(MirEvalError::TypeError(
-"powif32 signature doesn't match fn (f32, i32) -> f32",
+return Err(MirEvalError::InternalError(
+"powif32 signature doesn't match fn (f32, i32) -> f32".into(),
));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
@@ -712,8 +726,8 @@ impl Evaluator<'_> {
}
"fma" => {
let [arg1, arg2, arg3] = args else {
-return Err(MirEvalError::TypeError(
-"fmaf32 signature doesn't match fn (f32, f32, f32) -> f32",
+return Err(MirEvalError::InternalError(
+"fmaf32 signature doesn't match fn (f32, f32, f32) -> f32".into(),
));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
@@ -730,7 +744,9 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
+return Err(MirEvalError::InternalError(
+"size_of generic arg is not provided".into(),
+));
};
let size = self.size_of_sized(ty, locals, "size_of arg")?;
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
@@ -739,7 +755,9 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
+return Err(MirEvalError::InternalError(
+"align_of generic arg is not provided".into(),
+));
};
let align = self.layout(ty)?.align.abi.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
@@ -748,10 +766,14 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError("size_of_val generic arg is not provided"));
+return Err(MirEvalError::InternalError(
+"size_of_val generic arg is not provided".into(),
+));
};
let [arg] = args else {
-return Err(MirEvalError::TypeError("size_of_val args are not provided"));
+return Err(MirEvalError::InternalError(
+"size_of_val args are not provided".into(),
+));
};
if let Some((size, _)) = self.size_align_of(ty, locals)? {
destination.write_from_bytes(self, &size.to_le_bytes())
@@ -765,12 +787,14 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError(
-"min_align_of_val generic arg is not provided",
+return Err(MirEvalError::InternalError(
+"min_align_of_val generic arg is not provided".into(),
));
};
let [arg] = args else {
-return Err(MirEvalError::TypeError("min_align_of_val args are not provided"));
+return Err(MirEvalError::InternalError(
+"min_align_of_val args are not provided".into(),
+));
};
if let Some((_, align)) = self.size_align_of(ty, locals)? {
destination.write_from_bytes(self, &align.to_le_bytes())
@@ -784,7 +808,9 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError("type_name generic arg is not provided"));
+return Err(MirEvalError::InternalError(
+"type_name generic arg is not provided".into(),
+));
};
let ty_name = match ty.display_source_code(
self.db,
@@ -808,7 +834,9 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
+return Err(MirEvalError::InternalError(
+"size_of generic arg is not provided".into(),
+));
};
let result = !ty.clone().is_copy(self.db, locals.body.owner);
destination.write_from_bytes(self, &[u8::from(result)])
@@ -817,14 +845,18 @@ impl Evaluator<'_> {
// FIXME: this is wrong for const eval, it should return 2 in some
// cases.
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
+return Err(MirEvalError::InternalError(
+"wrapping_add args are not provided".into(),
+));
};
let ans = lhs.get(self)? == rhs.get(self)?;
destination.write_from_bytes(self, &[u8::from(ans)])
}
"saturating_add" | "saturating_sub" => {
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("saturating_add args are not provided"));
+return Err(MirEvalError::InternalError(
+"saturating_add args are not provided".into(),
+));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
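
The saturating arm above widens both operands to `u128` before doing the arithmetic, so the clamp to the destination width cannot itself overflow. A sketch of the same idea at `u8` width (illustrative values; the real shim clamps based on `destination.size`):

// Widen, add, then clamp to the narrow type's maximum.
fn saturating_add_u8(lhs: u8, rhs: u8) -> u8 {
    let wide = lhs as u128 + rhs as u128;
    wide.min(u8::MAX as u128) as u8
}

fn main() {
    assert_eq!(saturating_add_u8(200, 100), 255); // clamped
    assert_eq!(saturating_add_u8(1, 2), 3);
    assert_eq!(200u8.saturating_add(100), 255);   // std equivalent
}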
@@ -844,7 +876,9 @@ impl Evaluator<'_> {
}
"wrapping_add" | "unchecked_add" => {
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
+return Err(MirEvalError::InternalError(
+"wrapping_add args are not provided".into(),
+));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -853,7 +887,9 @@ impl Evaluator<'_> {
}
"ptr_offset_from_unsigned" | "ptr_offset_from" => {
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
+return Err(MirEvalError::InternalError(
+"wrapping_sub args are not provided".into(),
+));
};
let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -861,8 +897,8 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError(
-"ptr_offset_from generic arg is not provided",
+return Err(MirEvalError::InternalError(
+"ptr_offset_from generic arg is not provided".into(),
));
};
let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128;
@@ -871,7 +907,9 @@ impl Evaluator<'_> {
}
"wrapping_sub" | "unchecked_sub" => {
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
+return Err(MirEvalError::InternalError(
+"wrapping_sub args are not provided".into(),
+));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -880,7 +918,9 @@ impl Evaluator<'_> {
}
"wrapping_mul" | "unchecked_mul" => {
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("wrapping_mul args are not provided"));
+return Err(MirEvalError::InternalError(
+"wrapping_mul args are not provided".into(),
+));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -890,7 +930,9 @@ impl Evaluator<'_> {
"wrapping_shl" | "unchecked_shl" => {
// FIXME: signed
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("unchecked_shl args are not provided"));
+return Err(MirEvalError::InternalError(
+"unchecked_shl args are not provided".into(),
+));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -900,7 +942,9 @@ impl Evaluator<'_> {
"wrapping_shr" | "unchecked_shr" => {
// FIXME: signed
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("unchecked_shr args are not provided"));
+return Err(MirEvalError::InternalError(
+"unchecked_shr args are not provided".into(),
+));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -910,7 +954,9 @@ impl Evaluator<'_> {
"unchecked_rem" => {
// FIXME: signed
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("unchecked_rem args are not provided"));
+return Err(MirEvalError::InternalError(
+"unchecked_rem args are not provided".into(),
+));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -922,7 +968,9 @@ impl Evaluator<'_> {
"unchecked_div" | "exact_div" => {
// FIXME: signed
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("unchecked_div args are not provided"));
+return Err(MirEvalError::InternalError(
+"unchecked_div args are not provided".into(),
+));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
@@ -933,7 +981,9 @@ impl Evaluator<'_> {
}
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => {
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
+return Err(MirEvalError::InternalError(
+"const_eval_select args are not provided".into(),
+));
};
let result_ty = TyKind::Tuple(
2,
@@ -954,7 +1004,7 @@ impl Evaluator<'_> {
|| ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
let is_overflow = vec![u8::from(is_overflow)];
let layout = self.layout(&result_ty)?;
-let result = self.make_by_layout(
+let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
None,
@@ -966,15 +1016,15 @@ impl Evaluator<'_> {
}
"copy" | "copy_nonoverlapping" => {
let [src, dst, offset] = args else {
-return Err(MirEvalError::TypeError(
-"copy_nonoverlapping args are not provided",
+return Err(MirEvalError::InternalError(
+"copy_nonoverlapping args are not provided".into(),
));
};
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError(
-"copy_nonoverlapping generic arg is not provided",
+return Err(MirEvalError::InternalError(
+"copy_nonoverlapping generic arg is not provided".into(),
));
};
let src = Address::from_bytes(src.get(self)?)?;
@@ -988,18 +1038,22 @@ impl Evaluator<'_> {
}
"offset" | "arith_offset" => {
let [ptr, offset] = args else {
-return Err(MirEvalError::TypeError("offset args are not provided"));
+return Err(MirEvalError::InternalError("offset args are not provided".into()));
};
let ty = if name == "offset" {
let Some(ty0) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError("offset generic arg is not provided"));
+return Err(MirEvalError::InternalError(
+"offset generic arg is not provided".into(),
+));
};
let Some(ty1) =
generic_args.as_slice(Interner).get(1).and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError("offset generic arg is not provided"));
+return Err(MirEvalError::InternalError(
+"offset generic arg is not provided".into(),
+));
};
if !matches!(
ty1.as_builtin(),
@@ -1008,15 +1062,15 @@ impl Evaluator<'_> {
| BuiltinType::Uint(BuiltinUint::Usize)
)
) {
-return Err(MirEvalError::TypeError(
-"offset generic arg is not usize or isize",
+return Err(MirEvalError::InternalError(
+"offset generic arg is not usize or isize".into(),
));
}
match ty0.as_raw_ptr() {
Some((ty, _)) => ty,
None => {
-return Err(MirEvalError::TypeError(
-"offset generic arg is not a raw pointer",
+return Err(MirEvalError::InternalError(
+"offset generic arg is not a raw pointer".into(),
));
}
}
@@ -1024,8 +1078,8 @@ impl Evaluator<'_> {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError(
-"arith_offset generic arg is not provided",
+return Err(MirEvalError::InternalError(
+"arith_offset generic arg is not provided".into(),
));
};
ty
@@ -1046,19 +1100,21 @@ impl Evaluator<'_> {
}
"transmute" => {
let [arg] = args else {
-return Err(MirEvalError::TypeError("transmute arg is not provided"));
+return Err(MirEvalError::InternalError(
+"transmute arg is not provided".into(),
+));
};
destination.write_from_interval(self, arg.interval)
}
"likely" | "unlikely" => {
let [arg] = args else {
-return Err(MirEvalError::TypeError("likely arg is not provided"));
+return Err(MirEvalError::InternalError("likely arg is not provided".into()));
};
destination.write_from_interval(self, arg.interval)
}
"ctpop" => {
let [arg] = args else {
-return Err(MirEvalError::TypeError("ctpop arg is not provided"));
+return Err(MirEvalError::InternalError("ctpop arg is not provided".into()));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones();
destination
@@ -1066,7 +1122,7 @@ impl Evaluator<'_> {
}
"ctlz" | "ctlz_nonzero" => {
let [arg] = args else {
-return Err(MirEvalError::TypeError("ctlz arg is not provided"));
+return Err(MirEvalError::InternalError("ctlz arg is not provided".into()));
};
let result =
u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;
@@ -1076,7 +1132,7 @@ impl Evaluator<'_> {
}
"cttz" | "cttz_nonzero" => {
let [arg] = args else {
-return Err(MirEvalError::TypeError("cttz arg is not provided"));
+return Err(MirEvalError::InternalError("cttz arg is not provided".into()));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros();
destination
@@ -1084,7 +1140,9 @@ impl Evaluator<'_> {
}
"rotate_left" => {
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("rotate_left args are not provided"));
+return Err(MirEvalError::InternalError(
+"rotate_left args are not provided".into(),
+));
};
let lhs = &lhs.get(self)?[0..destination.size];
let rhs = rhs.get(self)?[0] as u32;
@@ -1114,7 +1172,9 @@ impl Evaluator<'_> {
}
"rotate_right" => {
let [lhs, rhs] = args else {
-return Err(MirEvalError::TypeError("rotate_right args are not provided"));
+return Err(MirEvalError::InternalError(
+"rotate_right args are not provided".into(),
+));
};
let lhs = &lhs.get(self)?[0..destination.size];
let rhs = rhs.get(self)?[0] as u32;
@@ -1144,13 +1204,15 @@ impl Evaluator<'_> {
}
"discriminant_value" => {
let [arg] = args else {
-return Err(MirEvalError::TypeError("discriminant_value arg is not provided"));
+return Err(MirEvalError::InternalError(
+"discriminant_value arg is not provided".into(),
+));
};
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError(
-"discriminant_value generic arg is not provided",
+return Err(MirEvalError::InternalError(
+"discriminant_value generic arg is not provided".into(),
));
};
let addr = Address::from_bytes(arg.get(self)?)?;
@@ -1161,11 +1223,15 @@ impl Evaluator<'_> {
}
"const_eval_select" => {
let [tuple, const_fn, _] = args else {
-return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
+return Err(MirEvalError::InternalError(
+"const_eval_select args are not provided".into(),
+));
};
let mut args = vec![const_fn.clone()];
let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else {
-return Err(MirEvalError::TypeError("const_eval_select arg[0] is not a tuple"));
+return Err(MirEvalError::InternalError(
+"const_eval_select arg[0] is not a tuple".into(),
+));
};
let layout = self.layout(&tuple.ty)?;
for (i, field) in fields.iter(Interner).enumerate() {
@@ -1196,21 +1262,25 @@ impl Evaluator<'_> {
}
"read_via_copy" | "volatile_load" => {
let [arg] = args else {
-return Err(MirEvalError::TypeError("read_via_copy args are not provided"));
+return Err(MirEvalError::InternalError(
+"read_via_copy args are not provided".into(),
+));
};
let addr = Address::from_bytes(arg.interval.get(self)?)?;
destination.write_from_interval(self, Interval { addr, size: destination.size })
}
"write_via_move" => {
let [ptr, val] = args else {
-return Err(MirEvalError::TypeError("write_via_move args are not provided"));
+return Err(MirEvalError::InternalError(
+"write_via_move args are not provided".into(),
+));
};
let dst = Address::from_bytes(ptr.get(self)?)?;
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError(
-"write_via_copy generic arg is not provided",
+return Err(MirEvalError::InternalError(
+"write_via_copy generic arg is not provided".into(),
));
};
let size = self.size_of_sized(ty, locals, "write_via_move ptr type")?;
@@ -1219,14 +1289,18 @@ impl Evaluator<'_> {
}
"write_bytes" => {
let [dst, val, count] = args else {
-return Err(MirEvalError::TypeError("write_bytes args are not provided"));
+return Err(MirEvalError::InternalError(
+"write_bytes args are not provided".into(),
+));
};
let count = from_bytes!(usize, count.get(self)?);
let val = from_bytes!(u8, val.get(self)?);
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError("write_bytes generic arg is not provided"));
+return Err(MirEvalError::InternalError(
+"write_bytes generic arg is not provided".into(),
+));
};
let dst = Address::from_bytes(dst.get(self)?)?;
let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
@@ -1310,10 +1384,14 @@ impl Evaluator<'_> {
let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
+return Err(MirEvalError::InternalError(
+"atomic intrinsic generic arg is not provided".into(),
+));
};
let Some(arg0) = args.first() else {
-return Err(MirEvalError::TypeError("atomic intrinsic arg0 is not provided"));
+return Err(MirEvalError::InternalError(
+"atomic intrinsic arg0 is not provided".into(),
+));
};
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
let arg0_interval =
@@ -1322,7 +1400,9 @@ impl Evaluator<'_> {
return destination.write_from_interval(self, arg0_interval);
}
let Some(arg1) = args.get(1) else {
-return Err(MirEvalError::TypeError("atomic intrinsic arg1 is not provided"));
+return Err(MirEvalError::InternalError(
+"atomic intrinsic arg1 is not provided".into(),
+));
};
if name.starts_with("store_") {
return arg0_interval.write_from_interval(self, arg1.interval);
@@ -1374,7 +1454,9 @@ impl Evaluator<'_> {
return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
let Some(arg2) = args.get(2) else {
-return Err(MirEvalError::TypeError("atomic intrinsic arg2 is not provided"));
+return Err(MirEvalError::InternalError(
+"atomic intrinsic arg2 is not provided".into(),
+));
};
if name.starts_with("cxchg_") || name.starts_with("cxchgweak_") {
let dest = if arg1.get(self)? == arg0_interval.get(self)? {
@@ -1389,7 +1471,7 @@ impl Evaluator<'_> {
)
.intern(Interner);
let layout = self.layout(&result_ty)?;
-let result = self.make_by_layout(
+let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
None,

View file

@@ -10,7 +10,7 @@ macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
Ok(it) => it,
-Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
+Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())),
}))
};
}
@@ -40,7 +40,9 @@ impl Evaluator<'_> {
.substitute(Interner, subst);
return Ok((fields.len(), field_ty));
}
-return Err(MirEvalError::TypeError("simd type with no len param"));
+return Err(MirEvalError::InternalError(
+"simd type with no len param".into(),
+));
}
};
match try_const_usize(self.db, len) {
@@ -48,14 +50,18 @@ impl Evaluator<'_> {
let Some(ty) =
subst.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
-return Err(MirEvalError::TypeError("simd type with no ty param"));
+return Err(MirEvalError::InternalError(
+"simd type with no ty param".into(),
+));
};
Ok((len as usize, ty.clone()))
}
-None => Err(MirEvalError::TypeError("simd type with unevaluatable len param")),
+None => Err(MirEvalError::InternalError(
+"simd type with unevaluatable len param".into(),
+)),
}
}
-_ => Err(MirEvalError::TypeError("simd type which is not a struct")),
+_ => Err(MirEvalError::InternalError("simd type which is not a struct".into())),
}
}
@@ -71,7 +77,9 @@ impl Evaluator<'_> {
match name {
"and" | "or" | "xor" => {
let [left, right] = args else {
-return Err(MirEvalError::TypeError("simd bit op args are not provided"));
+return Err(MirEvalError::InternalError(
+"simd bit op args are not provided".into(),
+));
};
let result = left
.get(self)?
@@ -88,7 +96,7 @@ impl Evaluator<'_> {
}
"eq" | "ne" | "lt" | "le" | "gt" | "ge" => {
let [left, right] = args else {
-return Err(MirEvalError::TypeError("simd args are not provided"));
+return Err(MirEvalError::InternalError("simd args are not provided".into()));
};
let (len, ty) = self.detect_simd_ty(&left.ty)?;
let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
@@ -125,7 +133,9 @@ impl Evaluator<'_> {
}
"bitmask" => {
let [op] = args else {
-return Err(MirEvalError::TypeError("simd_bitmask args are not provided"));
+return Err(MirEvalError::InternalError(
+"simd_bitmask args are not provided".into(),
+));
};
let (op_len, _) = self.detect_simd_ty(&op.ty)?;
let op_count = op.interval.size / op_len;
@@ -139,18 +149,20 @@ impl Evaluator<'_> {
}
"shuffle" => {
let [left, right, index] = args else {
-return Err(MirEvalError::TypeError("simd_shuffle args are not provided"));
+return Err(MirEvalError::InternalError(
+"simd_shuffle args are not provided".into(),
+));
};
let TyKind::Array(_, index_len) = index.ty.kind(Interner) else {
-return Err(MirEvalError::TypeError(
-"simd_shuffle index argument has non-array type",
+return Err(MirEvalError::InternalError(
+"simd_shuffle index argument has non-array type".into(),
));
};
let index_len = match try_const_usize(self.db, index_len) {
Some(it) => it as usize,
None => {
-return Err(MirEvalError::TypeError(
-"simd type with unevaluatable len param",
+return Err(MirEvalError::InternalError(
+"simd type with unevaluatable len param".into(),
))
}
};
@@ -164,8 +176,8 @@ impl Evaluator<'_> {
let val = match vector.clone().nth(index) {
Some(it) => it,
None => {
-return Err(MirEvalError::TypeError(
-"out of bound access in simd shuffle",
+return Err(MirEvalError::InternalError(
+"out of bound access in simd shuffle".into(),
))
}
};
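
For reference, `simd_shuffle` picks each output lane from the concatenation of the two input vectors via a constant index array; an index past the combined length is the "out of bound access" error seen above. A scalar sketch of that selection rule (not the evaluator's byte-level code):

// Lanes are selected from `left` followed by `right` by the index vector.
fn shuffle(left: &[u32], right: &[u32], index: &[usize]) -> Result<Vec<u32>, String> {
    let vector: Vec<u32> = left.iter().chain(right).copied().collect();
    index
        .iter()
        .map(|&i| {
            vector
                .get(i)
                .copied()
                .ok_or_else(|| "out of bound access in simd shuffle".to_string())
        })
        .collect()
}

fn main() {
    // Index 0 hits `left`, index 2 hits the first lane of `right`.
    assert_eq!(shuffle(&[1, 2], &[3, 4], &[0, 2]), Ok(vec![1, 3]));
    assert!(shuffle(&[1, 2], &[3, 4], &[9]).is_err());
}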

View file

@@ -97,7 +97,7 @@ pub enum MirLowerError {
MutatingRvalue,
UnresolvedLabel,
UnresolvedUpvar(Place),
-UnaccessableLocal,
+InaccessibleLocal,
// monomorphization errors:
GenericArgNotProvided(TypeOrConstParamId, Substitution),
@@ -116,7 +116,7 @@ impl DropScopeToken {
ctx.pop_drop_scope_internal(current, span)
}
-/// It is useful when we want a drop scope is syntaxically closed, but we don't want to execute any drop
+/// It is useful when we want a drop scope is syntactically closed, but we don't want to execute any drop
/// code. Either when the control flow is diverging (so drop code doesn't reached) or when drop is handled
/// for us (for example a block that ended with a return statement. Return will drop everything, so the block shouldn't
/// do anything)
@@ -186,7 +186,7 @@ impl MirLowerError {
| MirLowerError::UnsizedTemporary(_)
| MirLowerError::IncompleteExpr
| MirLowerError::IncompletePattern
-| MirLowerError::UnaccessableLocal
+| MirLowerError::InaccessibleLocal
| MirLowerError::TraitFunctionDefinition(_, _)
| MirLowerError::UnresolvedName(_)
| MirLowerError::RecordLiteralWithoutPath
@@ -939,7 +939,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(Some(current))
}
Expr::BinaryOp { lhs, rhs, op } => {
-let op = op.ok_or(MirLowerError::IncompleteExpr)?;
+let op: BinaryOp = op.ok_or(MirLowerError::IncompleteExpr)?;
let is_builtin = 'b: {
// Without adjust here is a hack. We assume that we know every possible adjustment
// for binary operator, and use without adjust to simplify our conditions.
@@ -1843,8 +1843,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
None => {
// FIXME: It should never happens, but currently it will happen in `const_dependent_on_local` test, which
// is a hir lowering problem IMO.
-// never!("Using unaccessable local for binding is always a bug");
+// never!("Using inaccessible local for binding is always a bug");
-Err(MirLowerError::UnaccessableLocal)
+Err(MirLowerError::InaccessibleLocal)
}
}
}
@@ -2068,7 +2068,7 @@ pub fn mir_body_for_closure_query(
}
pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<MirBody>> {
-let _p = profile::span("mir_body_query").detail(|| match def {
+let detail = match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(),
DefWithBodyId::StaticId(it) => db.static_data(it).name.display(db.upcast()).to_string(),
DefWithBodyId::ConstId(it) => db
@@ -2082,7 +2082,8 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
db.enum_variant_data(it).name.display(db.upcast()).to_string()
}
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
-});
+};
+let _p = tracing::span!(tracing::Level::INFO, "mir_body_query", ?detail).entered();
let body = db.body(def);
let infer = db.infer(def);
let mut result = lower_to_mir(db, def, &body, &infer, body.body_expr)?;
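
The last hunk swaps the old `profile::span(..).detail(..)` helper for a `tracing` span: `?detail` records the string as a Debug-formatted field, and `entered()` returns a guard that keeps the span open until it is dropped at the end of the query. A minimal sketch of the idiom (assumes the `tracing` crate; a subscriber must be installed for output to appear):

// Build the detail string first, then open a span carrying it as a field.
fn mir_body_query_like(name: &str) {
    let detail = name.to_string();
    let _p = tracing::span!(tracing::Level::INFO, "mir_body_query", ?detail).entered();
    // Everything logged here is attributed to the span above.
    tracing::info!("doing the actual work");
}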

View file

@@ -114,7 +114,7 @@ impl MirLowerCtx<'_> {
index: i as u32,
}))
}),
-&mut cond_place,
+&cond_place,
mode,
)?
}

View file

@@ -3424,7 +3424,7 @@ fn bin_op_with_rhs_is_self_for_assoc_bound() {
fn repro<T>(t: T) -> bool
where
T: Request,
-T::Output: Convertable,
+T::Output: Convertible,
{
let a = execute(&t).convert();
let b = execute(&t).convert();
@@ -3439,7 +3439,7 @@ where
{
<T as Request>::output()
}
-trait Convertable {
+trait Convertible {
type TraitSelf: PartialEq<Self::TraitSelf>;
type AssocAsDefaultSelf: PartialEq;
fn convert(self) -> Self::AssocAsDefaultSelf;

View file

@@ -100,13 +100,14 @@ pub(crate) fn trait_solve_query(
block: Option<BlockId>,
goal: Canonical<InEnvironment<Goal>>,
) -> Option<Solution> {
-let _p = profile::span("trait_solve_query").detail(|| match &goal.value.goal.data(Interner) {
+let detail = match &goal.value.goal.data(Interner) {
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string()
}
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(),
_ => "??".to_string(),
-});
+};
+let _p = tracing::span!(tracing::Level::INFO, "trait_solve_query", ?detail).entered();
tracing::info!("trait_solve_query({:?})", goal.value.goal);
if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq {

View file

@@ -24,18 +24,18 @@ use hir_def::{
};
use hir_expand::name::Name;
use intern::Interned;
+use rustc_abi::TargetDataLayout;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
use stdx::never;
-use triomphe::Arc;
use crate::{
consteval::unknown_const,
db::HirDatabase,
layout::{Layout, TagEncoding},
mir::pad16,
-ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitEnvironment,
-TraitRef, TraitRefExt, Ty, WhereClause,
+ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitRef, TraitRefExt,
+Ty, WhereClause,
};
pub(crate) fn fn_traits(
@@ -192,7 +192,7 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
/// and it doesn't store the closure types and fields.
///
/// Codes should not assume this ordering, and should always use methods available
-/// on this struct for retriving, and `TyBuilder::substs_for_closure` for creating.
+/// on this struct for retrieving, and `TyBuilder::substs_for_closure` for creating.
pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution);
impl<'a> ClosureSubst<'a> {
@@ -431,18 +431,16 @@ impl FallibleTypeFolder<Interner> for UnevaluatedConstEvaluatorFolder<'_> {
pub(crate) fn detect_variant_from_bytes<'a>(
layout: &'a Layout,
db: &dyn HirDatabase,
-trait_env: Arc<TraitEnvironment>,
+target_data_layout: &TargetDataLayout,
b: &[u8],
e: EnumId,
) -> Option<(EnumVariantId, &'a Layout)> {
-let krate = trait_env.krate;
let (var_id, var_layout) = match &layout.variants {
hir_def::layout::Variants::Single { index } => {
(db.enum_data(e).variants[index.0].0, layout)
}
hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => {
-let target_data_layout = db.target_data_layout(krate)?;
-let size = tag.size(&*target_data_layout).bytes_usize();
+let size = tag.size(target_data_layout).bytes_usize();
let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
let tag = i128::from_le_bytes(pad16(&b[offset..offset + size], false));
match tag_encoding {
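
With this change the caller hands `detect_variant_from_bytes` the target data layout directly, and the multi-variant case reads the discriminant tag straight out of the value's bytes: `size` bytes at the tag field's offset, zero-extended by `pad16(.., false)` and read as `i128`. A sketch of just that decoding step (the surrounding `TagEncoding` matching is elided):

// Take `size` bytes at `offset`, zero-extend to 16 bytes, read as i128.
fn read_tag(b: &[u8], offset: usize, size: usize) -> i128 {
    let mut buf = [0u8; 16];
    buf[..size].copy_from_slice(&b[offset..offset + size]);
    i128::from_le_bytes(buf)
}

fn main() {
    // A one-byte tag with value 3 at offset 0 decodes to discriminant 3.
    assert_eq!(read_tag(&[3, 0xAA], 0, 1), 3);
}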

View file

@@ -17,6 +17,7 @@ either.workspace = true
arrayvec.workspace = true
itertools.workspace = true
smallvec.workspace = true
+tracing.workspace = true
triomphe.workspace = true
once_cell = "1.17.1"
@@ -30,6 +31,7 @@ profile.workspace = true
stdx.workspace = true
syntax.workspace = true
tt.workspace = true
+span.workspace = true
[features]
in-rust-tree = []

View file

@@ -239,10 +239,9 @@ fn resolve_impl_trait_item(
) -> Option<DocLinkDef> {
let canonical = ty.canonical();
let krate = ty.krate(db);
-let environment = resolver.generic_def().map_or_else(
-|| crate::TraitEnvironment::empty(krate.id).into(),
-|d| db.trait_environment(d),
-);
+let environment = resolver
+.generic_def()
+.map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
let traits_in_scope = resolver.traits_in_scope(db.upcast());
let mut result = None;
@@ -297,7 +296,7 @@ fn as_module_def_if_namespace_matches(
AssocItem::TypeAlias(it) => (ModuleDef::TypeAlias(it), Namespace::Types),
};
-(ns.unwrap_or(expected_ns) == expected_ns).then(|| DocLinkDef::ModuleDef(def))
+(ns.unwrap_or(expected_ns) == expected_ns).then_some(DocLinkDef::ModuleDef(def))
}
fn modpath_from_str(link: &str) -> Option<ModPath> {
@@ -311,7 +310,7 @@ fn modpath_from_str(link: &str) -> Option<ModPath> {
"self" => PathKind::Super(0),
"super" => {
let mut deg = 1;
-while let Some(segment) = parts.next() {
+for segment in parts.by_ref() {
if segment == "super" {
deg += 1;
} else {
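
The `for segment in parts.by_ref()` form replaces the `while let` loop without giving up the iterator: `by_ref` lends `&mut parts` to the loop, so the function can keep consuming `parts` after the loop breaks. A standalone sketch of the pattern (the path string and counting are illustrative):

// `by_ref` lets a `for` loop borrow the iterator instead of moving it.
fn main() {
    let mut parts = "super::super::foo::bar".split("::");
    let mut deg = 0;
    for segment in parts.by_ref() {
        if segment == "super" {
            deg += 1;
        } else {
            break; // `parts` is still usable after this loop
        }
    }
    // "foo" was consumed by the loop; the remainder is still available.
    let rest: Vec<_> = parts.collect();
    assert_eq!(deg, 2);
    assert_eq!(rest, ["bar"]);
}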

View file

@@ -4,11 +4,12 @@
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
+use hir_ty::{db::HirDatabase, diagnostics::BodyValidationDiagnostic, InferenceDiagnostic};
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
-use hir_def::{path::ModPath, AssocItemId};
+use hir_def::{body::SyntheticSyntax, hir::ExprOrPatId, path::ModPath, AssocItemId, DefWithBodyId};
use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
@@ -30,14 +31,28 @@ macro_rules! diagnostics {
)*
};
}
+// FIXME Accept something like the following in the macro call instead
+// diagnostics![
+// pub struct BreakOutsideOfLoop {
+// pub expr: InFile<AstPtr<ast::Expr>>,
+// pub is_break: bool,
+// pub bad_value_break: bool,
+// }, ...
+// or more concisely
+// BreakOutsideOfLoop {
+// expr: InFile<AstPtr<ast::Expr>>,
+// is_break: bool,
+// bad_value_break: bool,
+// }, ...
+// ]
diagnostics![
BreakOutsideOfLoop,
ExpectedFunction,
InactiveCode,
+IncoherentImpl,
IncorrectCase,
InvalidDeriveTarget,
-IncoherentImpl,
MacroDefError,
MacroError,
MacroExpansionParseError,
@@ -55,8 +70,8 @@ diagnostics![
ReplaceFilterMapNextWithFindMap,
TraitImplIncorrectSafety,
TraitImplMissingAssocItems,
+TraitImplRedundantAssocItems,
TraitImplOrphan,
-TraitImplRedundantAssocItems,
TypedHole,
TypeMismatch,
UndeclaredLabel,
impl AnyDiagnostic {
pub(crate) fn body_validation_diagnostic(
db: &dyn HirDatabase,
diagnostic: BodyValidationDiagnostic,
source_map: &hir_def::body::BodySourceMap,
) -> Option<AnyDiagnostic> {
match diagnostic {
BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => {
let variant_data = variant.variant_data(db.upcast());
let missed_fields = missed_fields
.into_iter()
.map(|idx| variant_data.fields()[idx].name.clone())
.collect();
match record {
Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
Ok(source_ptr) => {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordExpr(record_expr) =
source_ptr.value.to_node(&root)
{
if record_expr.record_expr_field_list().is_some() {
let field_list_parent_path =
record_expr.path().map(|path| AstPtr::new(&path));
return Some(
MissingFields {
file: source_ptr.file_id,
field_list_parent: AstPtr::new(&Either::Left(
record_expr,
)),
field_list_parent_path,
missed_fields,
}
.into(),
);
}
}
}
Err(SyntheticSyntax) => (),
},
Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
Ok(source_ptr) => {
if let Some(ptr) = source_ptr.value.cast::<ast::RecordPat>() {
let root = source_ptr.file_syntax(db.upcast());
let record_pat = ptr.to_node(&root);
if record_pat.record_pat_field_list().is_some() {
let field_list_parent_path =
record_pat.path().map(|path| AstPtr::new(&path));
return Some(
MissingFields {
file: source_ptr.file_id,
field_list_parent: AstPtr::new(&Either::Right(
record_pat,
)),
field_list_parent_path,
missed_fields,
}
.into(),
);
}
}
}
Err(SyntheticSyntax) => (),
},
}
}
BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
return Some(
ReplaceFilterMapNextWithFindMap {
file: next_source_ptr.file_id,
next_expr: next_source_ptr.value,
}
.into(),
);
}
}
BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
match source_map.expr_syntax(match_expr) {
Ok(source_ptr) => {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
match match_expr.expr() {
Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
return Some(
MissingMatchArms {
scrutinee_expr: InFile::new(
source_ptr.file_id,
AstPtr::new(&scrut_expr),
),
uncovered_patterns,
}
.into(),
);
}
_ => {}
}
}
}
Err(SyntheticSyntax) => (),
}
}
}
None
}
pub(crate) fn inference_diagnostic(
db: &dyn HirDatabase,
def: DefWithBodyId,
d: &InferenceDiagnostic,
source_map: &hir_def::body::BodySourceMap,
) -> Option<AnyDiagnostic> {
let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
Some(match d {
&InferenceDiagnostic::NoSuchField { field: expr, private } => {
let expr_or_pat = match expr {
ExprOrPatId::ExprId(expr) => {
source_map.field_syntax(expr).map(AstPtr::wrap_left)
}
ExprOrPatId::PatId(pat) => {
source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
}
};
NoSuchField { field: expr_or_pat, private }.into()
}
&InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }.into()
}
&InferenceDiagnostic::PrivateField { expr, field } => {
let expr = expr_syntax(expr);
let field = field.into();
PrivateField { expr, field }.into()
}
&InferenceDiagnostic::PrivateAssocItem { id, item } => {
let expr_or_pat = match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
};
let item = item.into();
PrivateAssocItem { expr_or_pat, item }.into()
}
InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
let call_expr = expr_syntax(*call_expr);
ExpectedFunction { call: call_expr, found: Type::new(db, def, found.clone()) }
.into()
}
InferenceDiagnostic::UnresolvedField {
expr,
receiver,
name,
method_with_same_name_exists,
} => {
let expr = expr_syntax(*expr);
UnresolvedField {
expr,
name: name.clone(),
receiver: Type::new(db, def, receiver.clone()),
method_with_same_name_exists: *method_with_same_name_exists,
}
.into()
}
InferenceDiagnostic::UnresolvedMethodCall {
expr,
receiver,
name,
field_with_same_name,
assoc_func_with_same_name,
} => {
let expr = expr_syntax(*expr);
UnresolvedMethodCall {
expr,
name: name.clone(),
receiver: Type::new(db, def, receiver.clone()),
field_with_same_name: field_with_same_name
.clone()
.map(|ty| Type::new(db, def, ty)),
assoc_func_with_same_name: *assoc_func_with_same_name,
}
.into()
}
&InferenceDiagnostic::UnresolvedAssocItem { id } => {
let expr_or_pat = match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
};
UnresolvedAssocItem { expr_or_pat }.into()
}
&InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => {
let expr = expr_syntax(expr);
BreakOutsideOfLoop { expr, is_break, bad_value_break }.into()
}
InferenceDiagnostic::TypedHole { expr, expected } => {
let expr = expr_syntax(*expr);
TypedHole { expr, expected: Type::new(db, def, expected.clone()) }.into()
}
&InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => {
let expr_or_pat = match pat {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => {
let InFile { file_id, value } =
source_map.pat_syntax(pat).expect("unexpected synthetic");
// cast from Either<Pat, SelfParam> -> Either<_, Pat>
let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
return None;
};
InFile { file_id, value: ptr }
}
};
MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into()
}
})
}
}
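Both new constructors return `Option<AnyDiagnostic>`, which is what lets the call sites later in this commit collapse to a single `acc.extend(...)`: `Option<T>` implements `IntoIterator`, yielding zero or one items. A minimal sketch of that pattern:

fn main() {
    let mut acc: Vec<i32> = Vec::new();
    acc.extend(Some(1)); // pushes the contained value
    acc.extend(None::<i32>); // pushes nothing
    assert_eq!(acc, vec![1]);
}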

View file

@@ -61,7 +61,7 @@ use hir_def::{
use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind};
use hir_ty::{
all_super_traits, autoderef, check_orphan_rules,
-consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
+consteval::{try_const_usize, unknown_const_as_generic, ConstExt},
diagnostics::BodyValidationDiagnostic,
known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
@@ -70,9 +70,9 @@ use hir_ty::{
primitive::UintTy,
traits::FnTrait,
AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
-GenericArgData, InferenceDiagnostic, Interner, ParamKind, QuantifiedWhereClause, Scalar,
-Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind,
-ValueTyDefId, WhereClause,
+GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
+TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId,
+WhereClause,
};
use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
@@ -131,8 +131,10 @@ pub use {
MacroFileIdExt,
},
hir_ty::{
+consteval::ConstEvalError,
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
layout::LayoutError,
+mir::{MirEvalError, MirLowerError},
PointerCast, Safety,
},
// FIXME: Properly encapsulate mir
@@ -233,8 +235,8 @@ impl Crate {
db: &dyn DefDatabase,
query: import_map::Query,
) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
-let _p = profile::span("query_external_importables");
+let _p = tracing::span!(tracing::Level::INFO, "query_external_importables");
-import_map::search_dependencies(db, self.into(), query).into_iter().map(|item| {
+import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| {
match ItemInNs::from(item) {
ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
ItemInNs::Macros(mac_id) => Either::Right(mac_id),
@@ -537,13 +539,8 @@ impl Module {
/// Fills `acc` with the module's diagnostics.
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
-let _p = profile::span("Module::diagnostics").detail(|| {
-format!(
-"{:?}",
-self.name(db)
-.map_or("<unknown>".into(), |name| name.display(db.upcast()).to_string())
-)
-});
+let name = self.name(db);
+let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", ?name);
let def_map = self.id.def_map(db.upcast());
for diag in def_map.diagnostics() {
if diag.in_module != self.id.local_id {
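The `profile::span` to `tracing::span!` migration running through this commit uses tracing's field shorthand: `?name` records a field with its `Debug` impl (where `%` would use `Display`), and later hunks call `.entered()` to enter the span for the current scope. A minimal sketch of both patterns; the function and span names here are illustrative only:

use tracing::Level;

fn module_diagnostics(name: Option<&str>) {
    // `?name` captures the field using its Debug impl.
    let _p = tracing::span!(Level::INFO, "Module::diagnostics", ?name);

    // `entered()` returns an RAII guard that exits the span when dropped.
    let _guard = tracing::span!(Level::INFO, "analyze_step").entered();
}

fn main() {
    // Without a subscriber installed, spans are cheap no-ops; wiring one up
    // (e.g. via the tracing-subscriber crate) is out of scope for this sketch.
    module_diagnostics(Some("core"));
}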
@@ -906,7 +903,7 @@ fn emit_def_diagnostic_(
}
DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
let node = ast.to_node(db.upcast());
-let derive = node.attrs().nth(*id as usize);
+let derive = node.attrs().nth(*id);
match derive {
Some(derive) => {
acc.push(
@@ -921,7 +918,7 @@ fn emit_def_diagnostic_(
}
DefDiagnosticKind::MalformedDerive { ast, id } => {
let node = ast.to_node(db.upcast());
-let derive = node.attrs().nth(*id as usize);
+let derive = node.attrs().nth(*id);
match derive {
Some(derive) => {
acc.push(
@@ -1626,116 +1623,8 @@ impl DefWithBody {
}
let infer = db.infer(self.into());
-let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
-let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
for d in &infer.diagnostics {
-acc.push(match d {
+acc.extend(AnyDiagnostic::inference_diagnostic(db, self.into(), d, &source_map));
&InferenceDiagnostic::NoSuchField { field: expr, private } => {
let expr_or_pat = match expr {
ExprOrPatId::ExprId(expr) => {
source_map.field_syntax(expr).map(AstPtr::wrap_left)
}
ExprOrPatId::PatId(pat) => {
source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
}
};
NoSuchField { field: expr_or_pat, private }.into()
}
&InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }.into()
}
&InferenceDiagnostic::PrivateField { expr, field } => {
let expr = expr_syntax(expr);
let field = field.into();
PrivateField { expr, field }.into()
}
&InferenceDiagnostic::PrivateAssocItem { id, item } => {
let expr_or_pat = match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
};
let item = item.into();
PrivateAssocItem { expr_or_pat, item }.into()
}
InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
let call_expr = expr_syntax(*call_expr);
ExpectedFunction {
call: call_expr,
found: Type::new(db, DefWithBodyId::from(self), found.clone()),
}
.into()
}
InferenceDiagnostic::UnresolvedField {
expr,
receiver,
name,
method_with_same_name_exists,
} => {
let expr = expr_syntax(*expr);
UnresolvedField {
expr,
name: name.clone(),
receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
method_with_same_name_exists: *method_with_same_name_exists,
}
.into()
}
InferenceDiagnostic::UnresolvedMethodCall {
expr,
receiver,
name,
field_with_same_name,
assoc_func_with_same_name,
} => {
let expr = expr_syntax(*expr);
UnresolvedMethodCall {
expr,
name: name.clone(),
receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
field_with_same_name: field_with_same_name
.clone()
.map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
assoc_func_with_same_name: *assoc_func_with_same_name,
}
.into()
}
&InferenceDiagnostic::UnresolvedAssocItem { id } => {
let expr_or_pat = match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
};
UnresolvedAssocItem { expr_or_pat }.into()
}
&InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => {
let expr = expr_syntax(expr);
BreakOutsideOfLoop { expr, is_break, bad_value_break }.into()
}
InferenceDiagnostic::TypedHole { expr, expected } => {
let expr = expr_syntax(*expr);
TypedHole {
expr,
expected: Type::new(db, DefWithBodyId::from(self), expected.clone()),
}
.into()
}
&InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => {
let expr_or_pat = match pat {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => {
let InFile { file_id, value } =
source_map.pat_syntax(pat).expect("unexpected synthetic");
// cast from Either<Pat, SelfParam> -> Either<_, Pat>
let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
continue;
};
InFile { file_id, value: ptr }
}
};
MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into()
}
});
}
for (pat_or_expr, mismatch) in infer.type_mismatches() {
let expr_or_pat = match pat_or_expr {
@@ -1857,109 +1746,7 @@ impl DefWithBody {
}
for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
-match diagnostic {
+acc.extend(AnyDiagnostic::body_validation_diagnostic(db, diagnostic, &source_map));
BodyValidationDiagnostic::RecordMissingFields {
record,
variant,
missed_fields,
} => {
let variant_data = variant.variant_data(db.upcast());
let missed_fields = missed_fields
.into_iter()
.map(|idx| variant_data.fields()[idx].name.clone())
.collect();
match record {
Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
Ok(source_ptr) => {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordExpr(record_expr) =
source_ptr.value.to_node(&root)
{
if record_expr.record_expr_field_list().is_some() {
let field_list_parent_path =
record_expr.path().map(|path| AstPtr::new(&path));
acc.push(
MissingFields {
file: source_ptr.file_id,
field_list_parent: AstPtr::new(&Either::Left(
record_expr,
)),
field_list_parent_path,
missed_fields,
}
.into(),
)
}
}
}
Err(SyntheticSyntax) => (),
},
Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
Ok(source_ptr) => {
if let Some(ptr) = source_ptr.value.cast::<ast::RecordPat>() {
let root = source_ptr.file_syntax(db.upcast());
let record_pat = ptr.to_node(&root);
if record_pat.record_pat_field_list().is_some() {
let field_list_parent_path =
record_pat.path().map(|path| AstPtr::new(&path));
acc.push(
MissingFields {
file: source_ptr.file_id,
field_list_parent: AstPtr::new(&Either::Right(
record_pat,
)),
field_list_parent_path,
missed_fields,
}
.into(),
)
}
}
}
Err(SyntheticSyntax) => (),
},
}
}
BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
acc.push(
ReplaceFilterMapNextWithFindMap {
file: next_source_ptr.file_id,
next_expr: next_source_ptr.value,
}
.into(),
);
}
}
BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
match source_map.expr_syntax(match_expr) {
Ok(source_ptr) => {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::MatchExpr(match_expr) =
&source_ptr.value.to_node(&root)
{
match match_expr.expr() {
Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
acc.push(
MissingMatchArms {
scrutinee_expr: InFile::new(
source_ptr.file_id,
AstPtr::new(&scrut_expr),
),
uncovered_patterns,
}
.into(),
);
}
_ => {}
}
}
}
Err(SyntheticSyntax) => (),
}
}
}
}
let def: ModuleDef = match self {
@@ -1975,7 +1762,6 @@ impl DefWithBody {
}
}
}
-
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Function {
pub(crate) id: FunctionId,
@@ -4266,7 +4052,7 @@ impl Type {
name: Option<&Name>,
mut callback: impl FnMut(Function) -> Option<T>,
) -> Option<T> {
-let _p = profile::span("iterate_method_candidates");
+let _p = tracing::span!(tracing::Level::INFO, "iterate_method_candidates");
let mut slot = None;

self.iterate_method_candidates_dyn(
@@ -4345,7 +4131,7 @@ impl Type {
name: Option<&Name>,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
-let _p = profile::span("iterate_path_candidates");
+let _p = tracing::span!(tracing::Level::INFO, "iterate_path_candidates");
let mut slot = None;
self.iterate_path_candidates_dyn(
db,
@@ -4411,7 +4197,7 @@ impl Type {
&'a self,
db: &'a dyn HirDatabase,
) -> impl Iterator<Item = Trait> + 'a {
-let _p = profile::span("applicable_inherent_traits");
+let _p = tracing::span!(tracing::Level::INFO, "applicable_inherent_traits");
self.autoderef_(db)
.filter_map(|ty| ty.dyn_trait())
.flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
@@ -4419,7 +4205,7 @@
}

pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
-let _p = profile::span("env_traits");
+let _p = tracing::span!(tracing::Level::INFO, "env_traits");
self.autoderef_(db)
.filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
.flat_map(|ty| {

View file

@@ -25,6 +25,7 @@ use hir_expand::{
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
+use span::{Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
@@ -131,6 +132,7 @@ pub struct SemanticsImpl<'db> {
/// Rootnode to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
+// So we might wanna move them out into something specific for semantic highlighting
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
@@ -607,29 +609,102 @@ impl<'db> SemanticsImpl<'db> {
res
}

+// return:
+// SourceAnalyzer(file_id that original call include!)
+// macro file id
+// token in include! macro mapped from token in params
+// span for the mapped token
+fn is_from_include_file(
+&self,
+token: SyntaxToken,
) -> Option<(SourceAnalyzer, HirFileId, SyntaxToken, Span)> {
let parent = token.parent()?;
let file_id = self.find_file(&parent).file_id.file_id()?;
let mut cache = self.expansion_info_cache.borrow_mut();
// iterate related crates and find all include! invocations that include_file_id matches
for (invoc, _) in self
.db
.relevant_crates(file_id)
.iter()
.flat_map(|krate| self.db.include_macro_invoc(*krate))
.filter(|&(_, include_file_id)| include_file_id == file_id)
{
let macro_file = invoc.as_macro_file();
let expansion_info = cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
// Create the source analyzer for the macro call scope
let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
else {
continue;
};
{
let InMacroFile { file_id: macro_file, value } = expansion_info.expanded();
self.cache(value, macro_file.into());
}
// get mapped token in the include! macro file
let span = span::SpanData {
range: token.text_range(),
anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT,
};
let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
expansion_info.map_range_down(span)
else {
continue;
};
// if we find one, then return
if let Some(t) = mapped_tokens.next() {
return Some((sa, file_id.into(), t, span));
}
}
None
}
fn descend_into_macros_impl(
&self,
-token: SyntaxToken,
+mut token: SyntaxToken,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
-let _p = profile::span("descend_into_macros");
+let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros");
-let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
-Some(it) => it,
-None => return,
-};
-
-let span = match sa.file_id.file_id() {
-Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
-None => {
-stdx::never!();
-return;
-}
-};
+let (sa, span, file_id) =
+match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
+Some(sa) => match sa.file_id.file_id() {
+Some(file_id) => (
+sa,
+self.db.real_span_map(file_id).span_for_range(token.text_range()),
+file_id.into(),
+),
+None => {
+stdx::never!();
+return;
+}
+},
+None => {
+// if we cannot find a source analyzer for this token, then we try to find out
+// whether this file is an included file and treat that as the include input
+let Some((it, macro_file_id, mapped_token, s)) =
+self.is_from_include_file(token)
+else {
+return;
+};
+token = mapped_token;
+(it, s, macro_file_id)
+}
+};
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map();
+let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];

let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
let expansion_info = cache
.entry(macro_file)
@@ -651,8 +726,6 @@ impl<'db> SemanticsImpl<'db> {
res
};

-let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
-
while let Some((file_id, mut tokens)) = stack.pop() {
while let Some(token) = tokens.pop() {
let was_not_remapped = (|| {
@@ -1222,7 +1295,7 @@ impl<'db> SemanticsImpl<'db> {
offset: Option<TextSize>,
infer_body: bool,
) -> Option<SourceAnalyzer> {
-let _p = profile::span("Semantics::analyze_impl");
+let _p = tracing::span!(tracing::Level::INFO, "Semantics::analyze_impl");
let node = self.find_file(node);
let container = self.with_ctx(|ctx| ctx.find_container(node))?;

View file

@@ -117,7 +117,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> {
impl SourceToDefCtx<'_, '_> {
pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> {
-let _p = profile::span("SourceBinder::to_module_def");
+let _p = tracing::span!(tracing::Level::INFO, "SourceBinder::to_module_def");
let mut mods = SmallVec::new();
for &crate_id in self.db.relevant_crates(file).iter() {
// FIXME: inner items
@@ -132,7 +132,7 @@ impl SourceToDefCtx<'_, '_> {
}

pub(super) fn module_to_def(&self, src: InFile<ast::Module>) -> Option<ModuleId> {
-let _p = profile::span("module_to_def");
+let _p = tracing::span!(tracing::Level::INFO, "module_to_def");
let parent_declaration = src
.syntax()
.ancestors_with_macros_skip_attr_item(self.db.upcast())
@@ -153,7 +153,7 @@ impl SourceToDefCtx<'_, '_> {
}

pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
-let _p = profile::span("source_file_to_def");
+let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def");
let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied()
}

View file

@@ -17,6 +17,7 @@ cov-mark = "2.0.0-pre.1"
itertools.workspace = true
either.workspace = true
smallvec.workspace = true
+tracing.workspace = true

# local deps
stdx.workspace = true

View file

@@ -105,7 +105,7 @@ fn add_missing_impl_members_inner(
assist_id: &'static str,
label: &'static str,
) -> Option<()> {
-let _p = profile::span("add_missing_impl_members_inner");
+let _p = tracing::span!(tracing::Level::INFO, "add_missing_impl_members_inner");
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
let impl_ = ctx.sema.to_def(&impl_def)?;
@@ -370,17 +370,17 @@
add_missing_impl_members,
r#"
pub trait Trait<'a, 'b, A, B, C> {
-fn foo(&self, one: &'a A, anoter: &'b B) -> &'a C;
+fn foo(&self, one: &'a A, another: &'b B) -> &'a C;
}

impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {$0}"#,
r#"
pub trait Trait<'a, 'b, A, B, C> {
-fn foo(&self, one: &'a A, anoter: &'b B) -> &'a C;
+fn foo(&self, one: &'a A, another: &'b B) -> &'a C;
}

impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {
-fn foo(&self, one: &'x T, anoter: &'y V) -> &'x U {
+fn foo(&self, one: &'x T, another: &'y V) -> &'x U {
${0:todo!()}
}
}"#,
@@ -393,7 +393,7 @@ impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {
add_missing_default_members,
r#"
pub trait Trait<'a, 'b, A, B, C: Default> {
-fn foo(&self, _one: &'a A, _anoter: &'b B) -> (C, &'a i32) {
+fn foo(&self, _one: &'a A, _another: &'b B) -> (C, &'a i32) {
let value: &'a i32 = &0;
(C::default(), value)
}
@@ -402,14 +402,14 @@ pub trait Trait<'a, 'b, A, B, C: Default> {
impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () {$0}"#,
r#"
pub trait Trait<'a, 'b, A, B, C: Default> {
-fn foo(&self, _one: &'a A, _anoter: &'b B) -> (C, &'a i32) {
+fn foo(&self, _one: &'a A, _another: &'b B) -> (C, &'a i32) {
let value: &'a i32 = &0;
(C::default(), value)
}
}

impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () {
-$0fn foo(&self, _one: &'x T, _anoter: &'y V) -> (U, &'x i32) {
+$0fn foo(&self, _one: &'x T, _another: &'y V) -> (U, &'x i32) {
let value: &'x i32 = &0;
(<U>::default(), value)
}

View file

@@ -163,7 +163,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
block
} else {
// `expr_replace` is a descendant of `to_wrap`, so both steps need to be
-// handled seperately, otherwise we wrap the wrong expression
+// handled separately, otherwise we wrap the wrong expression
let to_wrap = edit.make_mut(to_wrap);

// Replace the target expr first so that we don't need to find where

View file

@@ -418,7 +418,7 @@ where
}

#[test]
-fn new_function_with_generics_and_wheres() {
+fn new_function_with_generics_and_where() {
check_assist(
generate_default_from_new,
r#"

View file

@@ -295,7 +295,7 @@ fn generate_impl(
// those in strukt.
//
// These generics parameters will also be used in `field_ty` and
-// `where_clauses`, so we should substitude arguments in them as well.
+// `where_clauses`, so we should substitute arguments in them as well.
let strukt_params = resolve_name_conflicts(strukt_params, &old_impl_params);
let (field_ty, ty_where_clause) = match &strukt_params {
Some(strukt_params) => {
@@ -491,7 +491,7 @@ fn remove_useless_where_clauses(trait_ty: &ast::Type, self_ty: &ast::Type, wc: a
// Generate generic args that should be apply to current impl.
//
-// For exmaple, say we have implementation `impl<A, B, C> Trait for B<A>`,
+// For example, say we have implementation `impl<A, B, C> Trait for B<A>`,
// and `b: B<T>` in struct `S<T>`. Then the `A` should be instantiated to `T`.
// While the last two generic args `B` and `C` doesn't change, it remains
// `<B, C>`. So we apply `<T, B, C>` as generic arguments to impl.
@@ -637,7 +637,7 @@ fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option
let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());

// We want rhs of the const assignment to be a qualified path
-// The general case for const assigment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`)
+// The general case for const assignment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`)
// The qualified will have the following generic syntax :
// <Base as Trait<GenArgs>>::ConstName;
// FIXME : We can't rely on `make::path_qualified` for now but it would be nice to replace the following with it.
@@ -779,7 +779,7 @@ impl Trait for Base {}

#[test]
fn test_self_ty() {
-// trait whith `Self` type cannot be delegated
+// trait with `Self` type cannot be delegated
//
// See the function `fn f() -> Self`.
// It should be `fn f() -> Base` in `Base`, and `fn f() -> S` in `S`

View file

@@ -6,7 +6,7 @@ use syntax::{
use crate::{AssistContext, AssistId, AssistKind, Assists};

-// FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredicable case [#15581].
+// FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredictable case [#15581].
// Here just leave the `index_mut` method body be same as `index` method body, user can modify it manually to meet their need.

// Assist: generate_mut_trait_impl

View file

@@ -183,7 +183,7 @@ fn remove_items_visibility(item: &ast::AssocItem) {
fn strip_body(item: &ast::AssocItem) {
if let ast::AssocItem::Fn(f) = item {
if let Some(body) = f.body() {
-// In constrast to function bodies, we want to see no ws before a semicolon.
+// In contrast to function bodies, we want to see no ws before a semicolon.
// So let's remove them if we see any.
if let Some(prev) = body.syntax().prev_sibling_or_token() {
if prev.kind() == SyntaxKind::WHITESPACE {

View file

@@ -120,7 +120,7 @@ fn main() -> () {
}

#[test]
-fn fromed_in_child_mod_imported() {
+fn from_in_child_mod_imported() {
check_assist(
into_to_qualified_from,
r#"
@@ -168,7 +168,7 @@ fn main() -> () {
}

#[test]
-fn fromed_in_child_mod_not_imported() {
+fn from_in_child_mod_not_imported() {
check_assist(
into_to_qualified_from,
r#"

View file

@@ -1,8 +1,9 @@
use either::Either;
use ide_db::imports::{
insert_use::{ImportGranularity, InsertUseConfig},
-merge_imports::{try_merge_imports, try_merge_trees, MergeBehavior},
+merge_imports::{try_merge_imports, try_merge_trees, try_normalize_use_tree, MergeBehavior},
};
+use itertools::Itertools;
use syntax::{
algo::neighbor,
ast::{self, edit_in_place::Removable},
@@ -32,24 +33,13 @@ use Edit::*;
pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (target, edits) = if ctx.has_empty_selection() {
// Merge a neighbor
-let mut tree: ast::UseTree = ctx.find_node_at_offset()?;
-if ctx.config.insert_use.granularity == ImportGranularity::One
-&& tree.parent_use_tree_list().is_some()
-{
-cov_mark::hit!(resolve_top_use_tree_for_import_one);
-tree = tree.top_use_tree();
-}
+cov_mark::hit!(merge_with_use_item_neighbors);
+let tree = ctx.find_node_at_offset::<ast::UseTree>()?.top_use_tree();
let target = tree.syntax().text_range();
-let edits = if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) {
-cov_mark::hit!(merge_with_use_item_neighbors);
-let mut neighbor = next_prev().find_map(|dir| neighbor(&use_item, dir)).into_iter();
-use_item.try_merge_from(&mut neighbor, &ctx.config.insert_use)
-} else {
-cov_mark::hit!(merge_with_use_tree_neighbors);
-let mut neighbor = next_prev().find_map(|dir| neighbor(&tree, dir)).into_iter();
-tree.clone().try_merge_from(&mut neighbor, &ctx.config.insert_use)
-};
+let use_item = tree.syntax().parent().and_then(ast::Use::cast)?;
+let mut neighbor = next_prev().find_map(|dir| neighbor(&use_item, dir)).into_iter();
+let edits = use_item.try_merge_from(&mut neighbor, &ctx.config.insert_use);
(target, edits?)
} else {
// Merge selected
@@ -94,7 +84,35 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
for edit in edits_mut {
match edit {
Remove(it) => it.as_ref().either(Removable::remove, Removable::remove),
-Replace(old, new) => ted::replace(old, new),
+Replace(old, new) => {
ted::replace(old, &new);
// If there's a selection and we're replacing a use tree in a tree list,
// normalize the parent use tree if it only contains the merged subtree.
if !ctx.has_empty_selection() {
let normalized_use_tree = ast::UseTree::cast(new)
.as_ref()
.and_then(ast::UseTree::parent_use_tree_list)
.and_then(|use_tree_list| {
if use_tree_list.use_trees().collect_tuple::<(_,)>().is_some() {
Some(use_tree_list.parent_use_tree())
} else {
None
}
})
.and_then(|target_tree| {
try_normalize_use_tree(
&target_tree,
ctx.config.insert_use.granularity.into(),
)
.map(|top_use_tree_flat| (target_tree, top_use_tree_flat))
});
if let Some((old_tree, new_tree)) = normalized_use_tree {
cov_mark::hit!(replace_parent_with_normalized_use_tree);
ted::replace(old_tree.syntax(), new_tree.syntax());
}
}
}
}
}
},
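The `use_tree_list.use_trees().collect_tuple::<(_,)>().is_some()` check in the new `Replace` arm is an "exactly one element" test: itertools' `collect_tuple` returns `Some` only when the iterator yields exactly the tuple's arity. A standalone illustration (assumes the itertools crate as a dependency):

use itertools::Itertools;

fn main() {
    // Some only for exactly one item...
    assert_eq!([1].into_iter().collect_tuple::<(_,)>(), Some((1,)));
    // ...None for too few or too many.
    assert_eq!([0; 0].into_iter().collect_tuple::<(_,)>(), None);
    assert_eq!([1, 2].into_iter().collect_tuple::<(_,)>(), None);
}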
@@ -201,20 +219,17 @@
use std::fmt::{Display, Debug};
",
r"
-use std::fmt::{Display, Debug};
+use std::fmt::{Debug, Display};
",
);

// The assist macro below calls `check_assist_import_one` 4 times with different input
-// use item variations based on the first 2 input parameters, but only 2 calls
-// contain `use {std::fmt$0::{Display, Debug}};` for which the top use tree will need
-// to be resolved.
-cov_mark::check_count!(resolve_top_use_tree_for_import_one, 2);
+// use item variations based on the first 2 input parameters.
cov_mark::check_count!(merge_with_use_item_neighbors, 4);
check_assist_import_one_variations!(
"std::fmt$0::{Display, Debug}",
"std::fmt::{Display, Debug}",
-"use {std::fmt::{Display, Debug}};"
+"use {std::fmt::{Debug, Display}};"
);
}
@@ -257,7 +272,7 @@ use std::fmt::{Debug, Display};
}

#[test]
-fn merge_self1() {
+fn merge_self() {
check_assist(
merge_imports,
r"
@@ -276,21 +291,8 @@ use std::fmt::{self, Display};
}

#[test]
-fn merge_self2() {
-check_assist(
-merge_imports,
-r"
-use std::{fmt, $0fmt::Display};
-",
-r"
-use std::{fmt::{self, Display}};
-",
-);
-}
-
-#[test]
-fn not_applicable_to_single_one_style_import() {
-cov_mark::check!(resolve_top_use_tree_for_import_one);
+fn not_applicable_to_single_import() {
+check_assist_not_applicable(merge_imports, "use std::{fmt, $0fmt::Display};");
check_assist_not_applicable_for_import_one(
merge_imports,
"use {std::{fmt, $0fmt::Display}};",
@@ -385,14 +387,14 @@ pub(in this::path) use std::fmt::{Debug, Display};
#[test]
fn test_merge_nested() {
-cov_mark::check!(merge_with_use_tree_neighbors);
check_assist(
merge_imports,
r"
-use std::{fmt$0::Debug, fmt::Display};
+use std::{fmt$0::Debug, fmt::Error};
+use std::{fmt::Write, fmt::Display};
",
r"
-use std::{fmt::{Debug, Display}};
+use std::fmt::{Debug, Display, Error, Write};
",
);
}
@@ -402,10 +404,11 @@ use std::{fmt::{Debug, Display}};
check_assist(
merge_imports,
r"
-use std::{fmt::Debug, fmt$0::Display};
+use std::{fmt::Debug, fmt$0::Error};
+use std::{fmt::Write, fmt::Display};
",
r"
-use std::{fmt::{Debug, Display}};
+use std::fmt::{Debug, Display, Error, Write};
",
);
}
@@ -419,13 +422,13 @@ use std$0::{fmt::{Write, Display}};
use std::{fmt::{self, Debug}};
",
r"
-use std::{fmt::{self, Debug, Display, Write}};
+use std::fmt::{self, Debug, Display, Write};
",
);
check_assist_import_one_variations!(
"std$0::{fmt::{Write, Display}}",
"std::{fmt::{self, Debug}}",
-"use {std::{fmt::{self, Debug, Display, Write}}};"
+"use {std::fmt::{self, Debug, Display, Write}};"
);
}
@@ -438,26 +441,13 @@ use std$0::{fmt::{self, Debug}};
use std::{fmt::{Write, Display}};
",
r"
-use std::{fmt::{self, Debug, Display, Write}};
+use std::fmt::{self, Debug, Display, Write};
",
);
check_assist_import_one_variations!(
"std$0::{fmt::{self, Debug}}",
"std::{fmt::{Write, Display}}",
-"use {std::{fmt::{self, Debug, Display, Write}}};"
+"use {std::fmt::{self, Debug, Display, Write}};"
);
}
-
-#[test]
-fn test_merge_self_with_nested_self_item() {
-check_assist(
-merge_imports,
-r"
-use std::{fmt$0::{self, Debug}, fmt::{Write, Display}};
-",
-r"
-use std::{fmt::{self, Debug, Display, Write}};
-",
-);
-}
@@ -470,13 +460,13 @@ use foo::$0{bar::{self}};
use foo::{bar};
",
r"
-use foo::{bar::{self}};
+use foo::bar;
",
);
check_assist_import_one_variations!(
"foo::$0{bar::{self}}",
"foo::{bar}",
-"use {foo::{bar::{self}}};"
+"use {foo::bar};"
);
}
@@ -489,13 +479,13 @@ use foo::$0{bar};
use foo::{bar::{self}};
",
r"
-use foo::{bar::{self}};
+use foo::bar;
",
);
check_assist_import_one_variations!(
"foo::$0{bar}",
"foo::{bar::{self}}",
-"use {foo::{bar::{self}}};"
+"use {foo::bar};"
);
}
@@ -508,13 +498,13 @@ use std$0::{fmt::*};
use std::{fmt::{self, Display}};
",
r"
-use std::{fmt::{self, Display, *}};
+use std::fmt::{self, Display, *};
",
);
check_assist_import_one_variations!(
"std$0::{fmt::*}",
"std::{fmt::{self, Display}}",
-"use {std::{fmt::{self, Display, *}}};"
+"use {std::fmt::{self, Display, *}};"
);
}
@@ -579,29 +569,27 @@ use foo::{bar, baz};
check_assist(
merge_imports,
r"
-use {
-foo$0::bar,
-foo::baz,
+use foo$0::{
+bar, baz,
};
+use foo::qux;
",
r"
-use {
-foo::{bar, baz},
+use foo::{
+bar, baz, qux,
};
",
);
check_assist(
merge_imports,
r"
-use {
-foo::baz,
-foo$0::bar,
+use foo::{
+baz, bar,
};
+use foo$0::qux;
",
r"
-use {
-foo::{bar, baz},
-};
+use foo::{bar, baz, qux};
",
);
}
@@ -711,12 +699,19 @@ use std::{
};",
);

-// FIXME: Remove redundant braces. See also unnecessary-braces diagnostic.
cov_mark::check!(merge_with_selected_use_tree_neighbors);
+check_assist(
+merge_imports,
+r"use std::{fmt::Result, $0fmt::Display, fmt::Debug$0};",
+r"use std::{fmt::Result, fmt::{Debug, Display}};",
+);
+
+cov_mark::check!(merge_with_selected_use_tree_neighbors);
+cov_mark::check!(replace_parent_with_normalized_use_tree);
check_assist(
merge_imports,
r"use std::$0{fmt::Display, fmt::Debug}$0;",
-r"use std::{fmt::{Debug, Display}};",
+r"use std::fmt::{Debug, Display};",
);
}
}

View file

@@ -0,0 +1,219 @@
use ide_db::imports::merge_imports::try_normalize_import;
use syntax::{ast, AstNode};
use crate::{
assist_context::{AssistContext, Assists},
AssistId, AssistKind,
};
// Assist: normalize_import
//
// Normalizes an import.
//
// ```
// use$0 std::{io, {fmt::Formatter}};
// ```
// ->
// ```
// use std::{fmt::Formatter, io};
// ```
pub(crate) fn normalize_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let use_item = if ctx.has_empty_selection() {
ctx.find_node_at_offset()?
} else {
ctx.covering_element().ancestors().find_map(ast::Use::cast)?
};
let target = use_item.syntax().text_range();
let normalized_use_item =
try_normalize_import(&use_item, ctx.config.insert_use.granularity.into())?;
acc.add(
AssistId("normalize_import", AssistKind::RefactorRewrite),
"Normalize import",
target,
|builder| {
builder.replace_ast(use_item, normalized_use_item);
},
)
}
#[cfg(test)]
mod tests {
use crate::tests::{
check_assist, check_assist_import_one, check_assist_not_applicable,
check_assist_not_applicable_for_import_one,
};
use super::*;
macro_rules! check_assist_variations {
($fixture: literal, $expected: literal) => {
check_assist(
normalize_import,
concat!("use $0", $fixture, ";"),
concat!("use ", $expected, ";"),
);
check_assist(
normalize_import,
concat!("$0use ", $fixture, ";"),
concat!("use ", $expected, ";"),
);
check_assist_import_one(
normalize_import,
concat!("use $0", $fixture, ";"),
concat!("use {", $expected, "};"),
);
check_assist_import_one(
normalize_import,
concat!("$0use ", $fixture, ";"),
concat!("use {", $expected, "};"),
);
check_assist_import_one(
normalize_import,
concat!("use $0{", $fixture, "};"),
concat!("use {", $expected, "};"),
);
check_assist_import_one(
normalize_import,
concat!("$0use {", $fixture, "};"),
concat!("use {", $expected, "};"),
);
check_assist(
normalize_import,
concat!("use $0", $fixture, "$0;"),
concat!("use ", $expected, ";"),
);
check_assist(
normalize_import,
concat!("$0use ", $fixture, ";$0"),
concat!("use ", $expected, ";"),
);
};
}
macro_rules! check_assist_not_applicable_variations {
($fixture: literal) => {
check_assist_not_applicable(normalize_import, concat!("use $0", $fixture, ";"));
check_assist_not_applicable(normalize_import, concat!("$0use ", $fixture, ";"));
check_assist_not_applicable_for_import_one(
normalize_import,
concat!("use $0{", $fixture, "};"),
);
check_assist_not_applicable_for_import_one(
normalize_import,
concat!("$0use {", $fixture, "};"),
);
};
}
#[test]
fn test_order() {
check_assist_variations!(
"foo::{*, Qux, bar::{Quux, Bar}, baz, FOO_BAZ, self, Baz}",
"foo::{self, bar::{Bar, Quux}, baz, Baz, Qux, FOO_BAZ, *}"
);
}
#[test]
fn test_redundant_braces() {
check_assist_variations!("foo::{bar::{baz, Qux}}", "foo::bar::{baz, Qux}");
check_assist_variations!("foo::{bar::{self}}", "foo::bar");
check_assist_variations!("foo::{bar::{*}}", "foo::bar::*");
check_assist_variations!("foo::{bar::{Qux as Quux}}", "foo::bar::Qux as Quux");
check_assist_variations!(
"foo::bar::{{FOO_BAZ, Qux, self}, {*, baz}}",
"foo::bar::{self, baz, Qux, FOO_BAZ, *}"
);
check_assist_variations!(
"foo::bar::{{{FOO_BAZ}, {{Qux}, {self}}}, {{*}, {baz}}}",
"foo::bar::{self, baz, Qux, FOO_BAZ, *}"
);
}
#[test]
fn test_merge() {
check_assist_variations!(
"foo::{*, bar, {FOO_BAZ, qux}, bar::{*, baz}, {Quux}}",
"foo::{bar::{self, baz, *}, qux, Quux, FOO_BAZ, *}"
);
check_assist_variations!(
"foo::{*, bar, {FOO_BAZ, qux}, bar::{*, baz}, {Quux, bar::{baz::Foo}}}",
"foo::{bar::{self, baz::{self, Foo}, *}, qux, Quux, FOO_BAZ, *}"
);
}
#[test]
fn test_merge_self() {
check_assist_variations!("std::{fmt, fmt::Display}", "std::fmt::{self, Display}");
}
#[test]
fn test_merge_nested() {
check_assist_variations!("std::{fmt::Debug, fmt::Display}", "std::fmt::{Debug, Display}");
}
#[test]
fn test_merge_nested2() {
check_assist_variations!("std::{fmt::Debug, fmt::Display}", "std::fmt::{Debug, Display}");
}
#[test]
fn test_merge_self_with_nested_self_item() {
check_assist_variations!(
"std::{fmt::{self, Debug}, fmt::{Write, Display}}",
"std::fmt::{self, Debug, Display, Write}"
);
}
#[test]
fn works_with_trailing_comma() {
check_assist(
normalize_import,
r"
use $0{
foo::bar,
foo::baz,
};
",
r"
use foo::{bar, baz};
",
);
check_assist_import_one(
normalize_import,
r"
use $0{
foo::bar,
foo::baz,
};
",
r"
use {
foo::{bar, baz},
};
",
);
}
#[test]
fn not_applicable_to_normalized_import() {
check_assist_not_applicable_variations!("foo::bar");
check_assist_not_applicable_variations!("foo::bar::*");
check_assist_not_applicable_variations!("foo::bar::Qux as Quux");
check_assist_not_applicable_variations!("foo::bar::{self, baz, Qux, FOO_BAZ, *}");
check_assist_not_applicable_variations!(
"foo::{self, bar::{Bar, Quux}, baz, Baz, Qux, FOO_BAZ, *}"
);
check_assist_not_applicable_variations!(
"foo::{bar::{self, baz, *}, qux, Quux, FOO_BAZ, *}"
);
check_assist_not_applicable_variations!(
"foo::{bar::{self, baz::{self, Foo}, *}, qux, Quux, FOO_BAZ, *}"
);
}
}

View file

@@ -183,6 +183,7 @@ mod handlers {
mod move_guard;
mod move_module_to_file;
mod move_to_mod_rs;
+mod normalize_import;
mod number_representation;
mod promote_local_to_const;
mod pull_assignment_up;
@@ -300,6 +301,7 @@ mod handlers {
move_module_to_file::move_module_to_file,
move_to_mod_rs::move_to_mod_rs,
move_from_mod_rs::move_from_mod_rs,
+normalize_import::normalize_import,
number_representation::reformat_number_literal,
pull_assignment_up::pull_assignment_up,
promote_local_to_const::promote_local_to_const,

View file

@@ -2217,6 +2217,19 @@ fn t() {}
)
}
#[test]
fn doctest_normalize_import() {
check_doc_test(
"normalize_import",
r#####"
use$0 std::{io, {fmt::Formatter}};
"#####,
r#####"
use std::{fmt::Formatter, io};
"#####,
)
}
#[test]
fn doctest_promote_local_to_const() {
check_doc_test(

View file

@@ -2,6 +2,7 @@
use std::{fmt, fs, path::Path};

+use stdx::format_to_acc;
use test_utils::project_root;

#[test]
@@ -172,8 +173,7 @@ impl fmt::Display for Assist {
fn hide_hash_comments(text: &str) -> String {
text.split('\n') // want final newline
.filter(|&it| !(it.starts_with("# ") || it == "#"))
-.map(|it| format!("{it}\n"))
-.collect()
+.fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n"))
}

fn reveal_hash_comments(text: &str) -> String {
@@ -187,6 +187,5 @@ fn reveal_hash_comments(text: &str) -> String {
it
}
})
-.map(|it| format!("{it}\n"))
-.collect()
+.fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n"))
}
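This change drops the per-element `format!` allocation (`.map(...).collect::<String>()`, the pattern clippy's `format_collect` lint flags) in favor of folding into one accumulator; `stdx::format_to_acc!` is rust-analyzer's helper for appending formatted text to an accumulator and returning it. A rough std-only equivalent of the same idea, for illustration:

use std::fmt::Write as _;

fn join_lines<'a>(items: impl IntoIterator<Item = &'a str>) -> String {
    items.into_iter().fold(String::new(), |mut acc, it| {
        // Appends into the single existing buffer instead of allocating
        // a fresh String per element.
        let _ = writeln!(acc, "{it}");
        acc
    })
}

fn main() {
    assert_eq!(join_lines(["a", "b"]), "a\nb\n");
}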

View file

@@ -14,6 +14,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
itertools.workspace = true
+tracing.workspace = true
once_cell = "1.17.0"
smallvec.workspace = true

View file

@@ -31,7 +31,7 @@ pub(crate) fn complete_dot(
}

let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
-let is_method_acces_with_parens =
+let is_method_access_with_parens =
matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });

complete_fields(
@@ -41,7 +41,7 @@ pub(crate) fn complete_dot(
|acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
|acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
is_field_access,
-is_method_acces_with_parens,
+is_method_access_with_parens,
);

complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
@@ -114,14 +114,14 @@ fn complete_fields(
mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
is_field_access: bool,
-is_method_acess_with_parens: bool,
+is_method_access_with_parens: bool,
) {
let mut seen_names = FxHashSet::default();
for receiver in receiver.autoderef(ctx.db) {
for (field, ty) in receiver.fields(ctx.db) {
if seen_names.insert(field.name(ctx.db))
&& (is_field_access
-|| (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure())))
+|| (is_method_access_with_parens && (ty.is_fn() || ty.is_closure())))
{
named_field(acc, field, ty);
}
@@ -131,7 +131,7 @@ fn complete_fields(
// already seen without inserting into the hashset.
if !seen_names.contains(&hir::Name::new_tuple_field(i))
&& (is_field_access
-|| (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure())))
+|| (is_method_access_with_parens && (ty.is_fn() || ty.is_closure())))
{
// Tuple fields are always public (tuple struct fields are handled above).
tuple_index(acc, i, ty);

View file

@@ -15,7 +15,7 @@ pub(crate) fn complete_expr_path(
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
expr_ctx: &ExprCtx,
) {
-let _p = profile::span("complete_expr_path");
+let _p = tracing::span!(tracing::Level::INFO, "complete_expr_path").entered();
if !ctx.qualifier_ctx.none() {
return;
}

View file

@@ -26,7 +26,6 @@ const SUPPORTED_CALLING_CONVENTIONS: &[&str] = &[
"ptx-kernel",
"msp430-interrupt",
"x86-interrupt",
-"amdgpu-kernel",
"efiapi",
"avr-interrupt",
"avr-non-blocking-interrupt",

View file

@@ -207,7 +207,8 @@ fn import_on_the_fly(
     position: SyntaxNode,
     potential_import_name: String,
 ) -> Option<()> {
-    let _p = profile::span("import_on_the_fly").detail(|| potential_import_name.clone());
+    let _p =
+        tracing::span!(tracing::Level::INFO, "import_on_the_fly", ?potential_import_name).entered();

     ImportScope::find_insert_use_container(&position, &ctx.sema)?;
@@ -293,7 +294,8 @@ fn import_on_the_fly_pat_(
     position: SyntaxNode,
     potential_import_name: String,
 ) -> Option<()> {
-    let _p = profile::span("import_on_the_fly_pat").detail(|| potential_import_name.clone());
+    let _p = tracing::span!(tracing::Level::INFO, "import_on_the_fly_pat", ?potential_import_name)
+        .entered();

     ImportScope::find_insert_use_container(&position, &ctx.sema)?;
@@ -343,7 +345,9 @@ fn import_on_the_fly_method(
     position: SyntaxNode,
     potential_import_name: String,
 ) -> Option<()> {
-    let _p = profile::span("import_on_the_fly_method").detail(|| potential_import_name.clone());
+    let _p =
+        tracing::span!(tracing::Level::INFO, "import_on_the_fly_method", ?potential_import_name)
+            .entered();

     ImportScope::find_insert_use_container(&position, &ctx.sema)?;
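
These flyimport hunks also replace the old lazily-computed `.detail(|| ...)` string with a span field: in `tracing`'s macro syntax, the `?name` sigil records the field using its `Debug` implementation, so subscribers see the value alongside the span's timing. A small sketch of the pattern (the function name is illustrative):

    use tracing::{span, Level};

    fn lookup_imports(potential_import_name: String) {
        // `?potential_import_name` attaches the value to the span via Debug.
        let _p = span!(Level::INFO, "lookup_imports", ?potential_import_name).entered();

        // ... search for candidate imports ...
    }
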
@@ -28,7 +28,7 @@ pub(crate) fn complete_item_list(
     path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
     kind: &ItemListKind,
 ) {
-    let _p = profile::span("complete_item_list");
+    let _p = tracing::span!(tracing::Level::INFO, "complete_item_list").entered();
     if path_ctx.is_trivial_path() {
         add_keywords(acc, ctx, Some(kind));
     }
@@ -21,7 +21,7 @@ pub(crate) fn complete_mod(
         return None;
     }

-    let _p = profile::span("completion::complete_mod");
+    let _p = tracing::span!(tracing::Level::INFO, "completion::complete_mod").entered();

     let mut current_module = ctx.module;
     // For `mod $0`, `ctx.module` is its parent, but for `mod f$0`, it's `mod f` itself, but we're
@@ -84,6 +84,13 @@ pub(crate) fn complete_postfix(
         )
         .add_to(acc, ctx.db);

+        postfix_snippet(
+            "lete",
+            "let Ok else {}",
+            &format!("let Ok($1) = {receiver_text} else {{\n    $2\n}};\n$0"),
+        )
+        .add_to(acc, ctx.db);
+
         postfix_snippet(
             "while",
             "while let Ok {}",
@@ -99,6 +106,13 @@ pub(crate) fn complete_postfix(
         )
         .add_to(acc, ctx.db);

+        postfix_snippet(
+            "lete",
+            "let Some else {}",
+            &format!("let Some($1) = {receiver_text} else {{\n    $2\n}};\n$0"),
+        )
+        .add_to(acc, ctx.db);
+
         postfix_snippet(
             "while",
             "while let Some {}",
@@ -469,6 +483,29 @@ fn main() {
         );
     }

+    #[test]
+    fn option_letelse() {
+        check_edit(
+            "lete",
+            r#"
+//- minicore: option
+fn main() {
+    let bar = Some(true);
+    bar.$0
+}
+"#,
+            r#"
+fn main() {
+    let bar = Some(true);
+    let Some($1) = bar else {
+        $2
+    };
+    $0
+}
+"#,
+        );
+    }
+
     #[test]
     fn result_match() {
         check_edit(
@@ -15,7 +15,7 @@ pub(crate) fn complete_type_path(
     path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
     location: &TypeLocation,
 ) {
-    let _p = profile::span("complete_type_path");
+    let _p = tracing::span!(tracing::Level::INFO, "complete_type_path").entered();
     let scope_def_applicable = |def| {
         use hir::{GenericParam::*, ModuleDef::*};
@@ -568,7 +568,8 @@ impl CompletionContext<'_> {
     /// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and
     /// passes all doc-aliases along, to funnel it into [`Completions::add_path_resolution`].
     pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef, Vec<SmolStr>)) {
-        let _p = profile::span("CompletionContext::process_all_names");
+        let _p =
+            tracing::span!(tracing::Level::INFO, "CompletionContext::process_all_names").entered();
         self.scope.process_all_names(&mut |name, def| {
             if self.is_scope_def_hidden(def) {
                 return;
@@ -579,7 +580,8 @@ impl CompletionContext<'_> {
     }

     pub(crate) fn process_all_names_raw(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
-        let _p = profile::span("CompletionContext::process_all_names_raw");
+        let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::process_all_names_raw")
+            .entered();
         self.scope.process_all_names(f);
     }

@@ -637,7 +639,7 @@ impl<'a> CompletionContext<'a> {
         position @ FilePosition { file_id, offset }: FilePosition,
         config: &'a CompletionConfig,
     ) -> Option<(CompletionContext<'a>, CompletionAnalysis)> {
-        let _p = profile::span("CompletionContext::new");
+        let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::new").entered();
         let sema = Semantics::new(db);
         let original_file = sema.parse(file_id);
@@ -72,7 +72,7 @@ fn expand(
    mut fake_ident_token: SyntaxToken,
    relative_offset: TextSize,
 ) -> ExpansionResult {
-    let _p = profile::span("CompletionContext::expand");
+    let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::expand").entered();
     let mut derive_ctx = None;

     'expansion: loop {
@@ -211,7 +211,7 @@ fn analyze(
     original_token: &SyntaxToken,
     self_token: &SyntaxToken,
 ) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
-    let _p = profile::span("CompletionContext::analyze");
+    let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::analyze").entered();
     let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
         expansion_result;
@@ -1267,8 +1267,7 @@ fn pattern_context_for(
         pat
             .syntax()
             .ancestors()
-            .skip_while(|it| ast::Pat::can_cast(it.kind()))
-            .next()
+            .find(|it| !ast::Pat::can_cast(it.kind()))
             .map_or((PatternRefutability::Irrefutable, false), |node| {
                 let refutability = match_ast! {
                     match node {
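
The `pattern_context_for` change at the end of this hunk is a drop-in simplification: `skip_while(p).next()` is equivalent to `find(|x| !p(x))`, which is the rewrite clippy's `skip_while_next` lint suggests. A quick demonstration of the equivalence on a plain iterator:

    fn main() {
        let xs = [1, 3, 5, 4, 7];

        // Skip the leading odd numbers, then take the next element...
        let a = xs.iter().skip_while(|n| **n % 2 != 0).next();
        // ...which is the same as finding the first element that is not odd.
        let b = xs.iter().find(|n| **n % 2 == 0);

        assert_eq!(a, b); // both are Some(&4)
    }
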
@@ -433,7 +433,7 @@ impl Builder {
     }

     pub(crate) fn build(self, db: &RootDatabase) -> CompletionItem {
-        let _p = profile::span("item::Builder::build");
+        let _p = tracing::span!(tracing::Level::INFO, "item::Builder::build").entered();
         let label = self.label;
         let mut label_detail = None;
@@ -236,7 +236,7 @@ pub fn resolve_completion_edits(
     FilePosition { file_id, offset }: FilePosition,
     imports: impl IntoIterator<Item = (String, String)>,
 ) -> Option<Vec<TextEdit>> {
-    let _p = profile::span("resolve_completion_edits");
+    let _p = tracing::span!(tracing::Level::INFO, "resolve_completion_edits").entered();
     let sema = hir::Semantics::new(db);
     let original_file = sema.parse(file_id);
@@ -292,7 +292,7 @@ fn render_resolution_pat(
     import_to_add: Option<LocatedImport>,
     resolution: ScopeDef,
 ) -> Builder {
-    let _p = profile::span("render_resolution");
+    let _p = tracing::span!(tracing::Level::INFO, "render_resolution").entered();
     use hir::ModuleDef::*;

     if let ScopeDef::ModuleDef(Macro(mac)) = resolution {
@@ -310,7 +310,7 @@ fn render_resolution_path(
     import_to_add: Option<LocatedImport>,
     resolution: ScopeDef,
 ) -> Builder {
-    let _p = profile::span("render_resolution");
+    let _p = tracing::span!(tracing::Level::INFO, "render_resolution").entered();
     use hir::ModuleDef::*;

     match resolution {
@@ -418,7 +418,7 @@ fn render_resolution_simple_(
     import_to_add: Option<LocatedImport>,
     resolution: ScopeDef,
 ) -> Builder {
-    let _p = profile::span("render_resolution");
+    let _p = tracing::span!(tracing::Level::INFO, "render_resolution").entered();
     let db = ctx.db();
     let ctx = ctx.import_to_add(import_to_add);
@@ -6,7 +6,7 @@ use ide_db::SymbolKind;
 use crate::{item::CompletionItem, render::RenderContext};

 pub(crate) fn render_const(ctx: RenderContext<'_>, const_: hir::Const) -> Option<CompletionItem> {
-    let _p = profile::span("render_const");
+    let _p = tracing::span!(tracing::Level::INFO, "render_const").entered();
     render(ctx, const_)
 }
@@ -25,7 +25,7 @@ pub(crate) fn render_fn(
     local_name: Option<hir::Name>,
     func: hir::Function,
 ) -> Builder {
-    let _p = profile::span("render_fn");
+    let _p = tracing::span!(tracing::Level::INFO, "render_fn").entered();
     render(ctx, local_name, func, FuncKind::Function(path_ctx))
 }

@@ -36,7 +36,7 @@ pub(crate) fn render_method(
     local_name: Option<hir::Name>,
     func: hir::Function,
 ) -> Builder {
-    let _p = profile::span("render_method");
+    let _p = tracing::span!(tracing::Level::INFO, "render_method").entered();
     render(ctx, local_name, func, FuncKind::Method(dot_access, receiver))
 }
@@ -27,7 +27,7 @@ pub(crate) fn render_variant_lit(
     variant: hir::Variant,
     path: Option<hir::ModPath>,
 ) -> Option<Builder> {
-    let _p = profile::span("render_enum_variant");
+    let _p = tracing::span!(tracing::Level::INFO, "render_enum_variant").entered();
     let db = ctx.db();
     let name = local_name.unwrap_or_else(|| variant.name(db));

@@ -41,7 +41,7 @@ pub(crate) fn render_struct_literal(
     path: Option<hir::ModPath>,
     local_name: Option<hir::Name>,
 ) -> Option<Builder> {
-    let _p = profile::span("render_struct_literal");
+    let _p = tracing::span!(tracing::Level::INFO, "render_struct_literal").entered();
     let db = ctx.db();
     let name = local_name.unwrap_or_else(|| strukt.name(db));