Auto merge of #3291 - rust-lang:rustup-2024-02-06, r=saethlin

Automatic Rustup

Commit 5a10a7c20f: 241 changed files with 4408 additions and 4715 deletions
.github/workflows/ci.yaml (24 lines changed)
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -90,7 +90,7 @@ jobs:
       - name: Switch to stable toolchain
         run: |
           rustup update --no-self-update stable
-          rustup component add --toolchain stable rust-src
+          rustup component add --toolchain stable rust-src clippy
           rustup default stable
 
       - name: Run analysis-stats on rust-analyzer
@@ -103,6 +103,10 @@ jobs:
           RUSTC_BOOTSTRAP: 1
         run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std
 
+      - name: clippy
+        if: matrix.os == 'ubuntu-latest'
+        run: cargo clippy --all-targets
+
   # Weird targets to catch non-portable code
   rust-cross:
     if: github.repository == 'rust-lang/rust-analyzer'
@@ -203,11 +207,25 @@ jobs:
       working-directory: ./editors/code
       if: needs.changes.outputs.typescript == 'true'
 
+  typo-check:
+    name: Typo Check
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+    env:
+      FORCE_COLOR: 1
+      TYPOS_VERSION: v1.18.0
+    steps:
+      - name: download typos
+        run: curl -LsSf https://github.com/crate-ci/typos/releases/download/$TYPOS_VERSION/typos-$TYPOS_VERSION-x86_64-unknown-linux-musl.tar.gz | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin
+
+      - name: check for typos
+        run: typos
+
   end-success:
     name: bors build finished
     if: github.event.pusher.name == 'bors' && success()
     runs-on: ubuntu-latest
-    needs: [rust, rust-cross, typescript]
+    needs: [rust, rust-cross, typescript, typo-check]
     steps:
       - name: Mark the job as successful
         run: exit 0
@@ -216,7 +234,7 @@ jobs:
     name: bors build finished
     if: github.event.pusher.name == 'bors' && !success()
     runs-on: ubuntu-latest
-    needs: [rust, rust-cross, typescript]
+    needs: [rust, rust-cross, typescript, typo-check]
     steps:
       - name: Mark the job as a failure
         run: exit 1
.github/workflows/release.yaml (4 lines changed)
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -43,10 +43,10 @@ jobs:
           - os: ubuntu-20.04
             target: arm-unknown-linux-gnueabihf
             code-target: linux-armhf
-          - os: macos-11
+          - os: macos-12
             target: x86_64-apple-darwin
             code-target: darwin-x64
-          - os: macos-11
+          - os: macos-12
             target: aarch64-apple-darwin
             code-target: darwin-arm64
 
.typos.toml (new file, 31 lines)
--- /dev/null
+++ b/.typos.toml
@@ -0,0 +1,31 @@
+[default.extend-identifiers]
+AnserStyle = "AnserStyle"
+datas = "datas"
+impl_froms = "impl_froms"
+selfs = "selfs"
+
+[default.extend-words]
+anser = "anser"
+ba = "ba"
+fo = "fo"
+ket = "ket"
+makro = "makro"
+raison = "raison"
+trivias = "trivias"
+TOOD = "TOOD"
+
+[default]
+extend-ignore-re = [
+    # ignore string which contains $x (x is a num), which use widely in test
+    ".*\\$\\d.*",
+    # ignore generated content like `boxed....nner()`, `Defaul...efault`
+    "\\w*\\.{3,4}\\w*",
+]
+
+[files]
+extend-exclude = [
+    "*.json",
+    "*.rast",
+    "crates/parser/test_data/lexer/err/*",
+    "bench_data/*",
+]
Cargo.lock (generated, 152 lines changed)
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -19,11 +19,11 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
 
 [[package]]
 name = "always-assert"
-version = "0.1.3"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4436e0292ab1bb631b42973c61205e704475fe8126af845c8d923c0996328127"
+checksum = "a1078fa1ce1e34b1872d8611ad921196d76bdd7027e949fbe31231abde201892"
 dependencies = [
- "log",
+ "tracing",
 ]
 
 [[package]]
@@ -78,6 +78,7 @@ dependencies = [
  "span",
  "stdx",
  "syntax",
+ "tracing",
  "triomphe",
  "vfs",
 ]
@@ -166,7 +167,7 @@ checksum = "5676cea088c32290fe65c82895be9d06dd21e0fa49bb97ca840529e9417ab71a"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
  "synstructure",
 ]
 
@@ -312,6 +313,17 @@ dependencies = [
  "parking_lot_core",
 ]
 
+[[package]]
+name = "derivative"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
 [[package]]
 name = "derive_arbitrary"
 version = "1.3.2"
@@ -320,7 +332,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]
 
 [[package]]
@@ -483,8 +495,10 @@ dependencies = [
  "profile",
  "rustc-hash",
  "smallvec",
+ "span",
  "stdx",
  "syntax",
+ "tracing",
  "triomphe",
  "tt",
 ]
@@ -581,7 +595,8 @@ dependencies = [
  "profile",
  "project-model",
  "ra-ap-rustc_abi",
- "ra-ap-rustc_index",
+ "ra-ap-rustc_index 0.35.0",
+ "ra-ap-rustc_pattern_analysis",
  "rustc-hash",
  "scoped-tls",
  "smallvec",
@@ -658,6 +673,7 @@ dependencies = [
  "test-fixture",
  "test-utils",
  "text-edit",
+ "tracing",
 ]
 
 [[package]]
@@ -678,6 +694,7 @@ dependencies = [
  "test-fixture",
  "test-utils",
  "text-edit",
+ "tracing",
 ]
 
 [[package]]
@@ -735,6 +752,7 @@ dependencies = [
  "test-fixture",
  "test-utils",
  "text-edit",
+ "tracing",
 ]
 
 [[package]]
@@ -1330,6 +1348,7 @@ dependencies = [
  "once_cell",
  "perf-event",
  "tikv-jemalloc-ctl",
+ "tracing",
  "winapi",
 ]
 
@@ -1407,43 +1426,66 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_abi"
-version = "0.21.0"
+version = "0.35.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7816f980fab89e878ff2e916e2077d484e3aa1c619a3cc982c8a417c3dfe45fa"
+checksum = "3c0baa423a2c2bfd6e4bd40e7215f7ddebd12a649ce0b65078a38b91068895aa"
 dependencies = [
- "bitflags 1.3.2",
+ "bitflags 2.4.1",
- "ra-ap-rustc_index",
+ "ra-ap-rustc_index 0.35.0",
  "tracing",
 ]
 
 [[package]]
 name = "ra-ap-rustc_index"
-version = "0.21.0"
+version = "0.33.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8352918d61aa4afab9f2ed7314cf638976b20949b3d61d2f468c975b0d251f24"
+checksum = "5e5313d7f243b63ef9e58d94355b11aa8499f1328055f1f58adf0a5ea7d2faca"
 dependencies = [
  "arrayvec",
- "ra-ap-rustc_index_macros",
+ "ra-ap-rustc_index_macros 0.33.0",
+ "smallvec",
+]
+
+[[package]]
+name = "ra-ap-rustc_index"
+version = "0.35.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "322b751895cc4a0a2ee0c6ab36ec80bc8abf5f8d76254c482f96f03c27c92ebe"
+dependencies = [
+ "arrayvec",
+ "ra-ap-rustc_index_macros 0.35.0",
  "smallvec",
 ]
 
 [[package]]
 name = "ra-ap-rustc_index_macros"
-version = "0.21.0"
+version = "0.33.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66a9424018828155a3e3596515598f90e68427d8f35eff6df7f0856c73fc58a8"
+checksum = "a83108ebf3e73dde205b9c25706209bcd7736480820f90ded28eabaf8b469f25"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
+ "synstructure",
+]
+
+[[package]]
+name = "ra-ap-rustc_index_macros"
+version = "0.35.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "054e25eac52f0506c1309ca4317c11ad4925d7b99eb897f71aa7c3cbafb46c2b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.39",
  "synstructure",
 ]
 
 [[package]]
 name = "ra-ap-rustc_lexer"
-version = "0.21.0"
+version = "0.35.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc741c7a78103efab416b562e35bd73c8d4967478575010c86c6062f8d3cbf29"
+checksum = "c8da0fa51a1a97ba4296a1c78fa454815a153b472e2546b6338a0902ad59e015"
 dependencies = [
  "unicode-properties",
  "unicode-xid",
@@ -1451,14 +1493,28 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_parse_format"
-version = "0.21.0"
+version = "0.35.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d557201d71792487bd2bab637ab5be9aa6fff59b88e25e12de180b0f9d2df60f"
+checksum = "3851f930a54adcb76889983dcd5c00a0c4e206e190e1384dbc00d49b82dfb45e"
 dependencies = [
- "ra-ap-rustc_index",
+ "ra-ap-rustc_index 0.35.0",
  "ra-ap-rustc_lexer",
 ]
 
+[[package]]
+name = "ra-ap-rustc_pattern_analysis"
+version = "0.33.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c4085e0c771fd4b883930b599ef42966b855762bbe4052c17673b3253421a6d"
+dependencies = [
+ "derivative",
+ "ra-ap-rustc_index 0.33.0",
+ "rustc-hash",
+ "rustc_apfloat",
+ "smallvec",
+ "tracing",
+]
+
 [[package]]
 name = "rayon"
 version = "1.8.0"
@@ -1555,7 +1611,6 @@ dependencies = [
  "tikv-jemallocator",
  "toolchain",
  "tracing",
- "tracing-log",
  "tracing-subscriber",
  "tracing-tree",
  "triomphe",
@@ -1569,31 +1624,31 @@ dependencies = [
 
 [[package]]
 name = "rust-analyzer-salsa"
-version = "0.17.0-pre.5"
+version = "0.17.0-pre.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca9d387a9801f4fb9b366789ad1bfc08448cafc49cf148d907cfcd88ab665d7f"
+checksum = "719825638c59fd26a55412a24561c7c5bcf54364c88b9a7a04ba08a6eafaba8d"
 dependencies = [
  "indexmap",
  "lock_api",
- "log",
  "oorandom",
  "parking_lot",
  "rust-analyzer-salsa-macros",
  "rustc-hash",
  "smallvec",
+ "tracing",
  "triomphe",
 ]
 
 [[package]]
 name = "rust-analyzer-salsa-macros"
-version = "0.17.0-pre.5"
+version = "0.17.0-pre.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2035f385d7fae31e9b086f40b272ee1d79c484472f31c9a10348a406e841eaf"
+checksum = "4d96498e9684848c6676c399032ebc37c52da95ecbefa83d71ccc53b9f8a4a8e"
 dependencies = [
  "heck",
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]
 
 [[package]]
@@ -1608,6 +1663,16 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
 
+[[package]]
+name = "rustc_apfloat"
+version = "0.2.0+llvm-462a31f5a5ab"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "465187772033a5ee566f69fe008df03628fce549a0899aae76f0a0c2e34696be"
+dependencies = [
+ "bitflags 1.3.2",
+ "smallvec",
+]
+
 [[package]]
 name = "ryu"
 version = "1.0.13"
@@ -1625,9 +1690,9 @@ dependencies = [
 
 [[package]]
 name = "scip"
-version = "0.3.1"
+version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e84d21062a3ba08d58870c8c36b0c005b2b2261c6ad1bf7042585427c781883"
+checksum = "e5dc1bd66649133af84ab62436ddd2856c2605182b02dec2cd197f684dfe15ef"
 dependencies = [
  "protobuf",
 ]
@@ -1670,7 +1735,7 @@ checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]
 
 [[package]]
@@ -1693,7 +1758,7 @@ checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]
 
 [[package]]
@@ -1707,9 +1772,9 @@ dependencies = [
 
 [[package]]
 name = "smallvec"
-version = "1.10.0"
+version = "1.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
+checksum = "2593d31f82ead8df961d8bd23a64c2ccf2eb5dd34b0a34bfb4dd54011c72009e"
 
 [[package]]
 name = "smol_str"
@@ -1770,6 +1835,17 @@ dependencies = [
  "winapi",
 ]
 
+[[package]]
+name = "syn"
+version = "1.0.109"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
 [[package]]
 name = "syn"
 version = "2.0.39"
@@ -1789,7 +1865,7 @@ checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
  "unicode-xid",
 ]
 
@@ -1816,6 +1892,7 @@ dependencies = [
  "stdx",
  "test-utils",
  "text-edit",
+ "tracing",
  "triomphe",
  "ungrammar",
 ]
@@ -1843,6 +1920,7 @@ dependencies = [
  "rustc-hash",
  "stdx",
  "text-size",
+ "tracing",
 ]
 
 [[package]]
@@ -1876,7 +1954,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]
 
 [[package]]
@@ -1977,7 +2055,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]
 
 [[package]]
Cargo.toml (32 lines changed)
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -79,10 +79,11 @@ tt = { path = "./crates/tt", version = "0.0.0" }
 vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }
 
-ra-ap-rustc_lexer = { version = "0.21.0", default-features = false }
+ra-ap-rustc_lexer = { version = "0.35.0", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.21.0", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.35.0", default-features = false }
-ra-ap-rustc_index = { version = "0.21.0", default-features = false }
+ra-ap-rustc_index = { version = "0.35.0", default-features = false }
-ra-ap-rustc_abi = { version = "0.21.0", default-features = false }
+ra-ap-rustc_abi = { version = "0.35.0", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.33.0", default-features = false }
 
 # local crates that aren't published to crates.io. These should not have versions.
 sourcegen = { path = "./crates/sourcegen" }
@@ -112,7 +113,7 @@ itertools = "0.12.0"
 libc = "0.2.150"
 nohash-hasher = "0.2.0"
 rayon = "1.8.0"
-rust-analyzer-salsa = "0.17.0-pre.5"
+rust-analyzer-salsa = "0.17.0-pre.6"
 rustc-hash = "1.1.0"
 semver = "1.0.14"
 serde = { version = "1.0.192", features = ["derive"] }
@@ -127,9 +128,9 @@ text-size = "1.1.1"
 tracing = "0.1.40"
 tracing-tree = "0.3.0"
 tracing-subscriber = { version = "0.3.18", default-features = false, features = [
     "registry",
     "fmt",
     "tracing-log",
 ] }
 triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
 xshell = "0.2.5"
@@ -166,29 +167,14 @@ new_ret_no_self = "allow"
 
 ## Following lints should be tackled at some point
 borrowed_box = "allow"
-borrow_deref_ref = "allow"
-derivable_impls = "allow"
 derived_hash_with_manual_eq = "allow"
-field_reassign_with_default = "allow"
 forget_non_drop = "allow"
-format_collect = "allow"
-large_enum_variant = "allow"
 needless_doctest_main = "allow"
-new_without_default = "allow"
 non_canonical_clone_impl = "allow"
 non_canonical_partial_ord_impl = "allow"
 self_named_constructors = "allow"
-skip_while_next = "allow"
 too_many_arguments = "allow"
-toplevel_ref_arg = "allow"
 type_complexity = "allow"
-unnecessary_cast = "allow"
-unnecessary_filter_map = "allow"
-unnecessary_lazy_evaluations = "allow"
-unnecessary_mut_passed = "allow"
-useless_conversion = "allow"
-useless_format = "allow"
-wildcard_in_or_patterns = "allow"
 wrong_self_convention = "allow"
 
 ## warn at following lints
@@ -17,6 +17,7 @@ rust-analyzer-salsa.workspace = true
 rustc-hash.workspace = true
 triomphe.workspace = true
 semver.workspace = true
+tracing.workspace = true
 
 # local deps
 cfg.workspace = true
@@ -27,4 +28,4 @@ vfs.workspace = true
 span.workspace = true
 
 [lints]
 workspace = true
@@ -51,7 +51,7 @@ impl FileChange {
     }
 
     pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
-        let _p = profile::span("RootDatabase::apply_change");
+        let _p = tracing::span!(tracing::Level::INFO, "RootDatabase::apply_change").entered();
         if let Some(roots) = self.roots {
            for (idx, root) in roots.into_iter().enumerate() {
                let root_id = SourceRootId(idx as u32);
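
The change above is the first of many identical migrations in this commit: rust-analyzer's old profile::span guard is replaced with a tracing span guard. A minimal standalone sketch of the pattern, assuming only the tracing and tracing-subscriber crates (the function and field names here are illustrative, not from the diff):

    use tracing::Level;

    // Hypothetical unit of work; the guard measures how long the body runs.
    fn apply_change(n_roots: usize) {
        // `span!` constructs the span and `entered()` returns a guard that
        // stays active until it is dropped at the end of the scope, which is
        // the same lifetime the old `let _p = profile::span(...)` guard had.
        let _p = tracing::span!(Level::INFO, "apply_change", n_roots = n_roots).entered();
        // ... do the actual work here while the span is entered ...
    }

    fn main() {
        // Spans are only recorded when a subscriber is installed.
        tracing_subscriber::fmt().with_max_level(Level::INFO).init();
        apply_change(3);
    }

The `?file_id` shorthand seen in later hunks is the same idea, capturing a field with its Debug formatting instead of an explicit `name = value` pair.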
@@ -494,7 +494,7 @@ impl CrateGraph {
         from: CrateId,
         dep: Dependency,
     ) -> Result<(), CyclicDependenciesError> {
-        let _p = profile::span("add_dep");
+        let _p = tracing::span!(tracing::Level::INFO, "add_dep").entered();
 
         self.check_cycle_after_dependency(from, dep.crate_id)?;
 
@@ -2,8 +2,8 @@
 
 #![warn(rust_2018_idioms, unused_lifetimes)]
 
-mod input;
 mod change;
+mod input;
 
 use std::panic;
 
@@ -65,7 +65,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
 }
 
 fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
-    let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
+    let _p = tracing::span!(tracing::Level::INFO, "parse_query", ?file_id).entered();
     let text = db.file_text(file_id);
     SourceFile::parse(&text)
 }
@@ -116,7 +116,7 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
     }
 
     fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
-        let _p = profile::span("relevant_crates");
+        let _p = tracing::span!(tracing::Level::INFO, "relevant_crates").entered();
         let source_root = self.0.file_source_root(file_id);
         self.0.source_root_crates(source_root)
     }
@@ -18,28 +18,6 @@ pub enum CfgAtom {
     KeyValue { key: SmolStr, value: SmolStr },
 }
 
-impl CfgAtom {
-    /// Returns `true` when the atom comes from the target specification.
-    ///
-    /// If this returns `true`, then changing this atom requires changing the compilation target. If
-    /// it returns `false`, the atom might come from a build script or the build system.
-    pub fn is_target_defined(&self) -> bool {
-        match self {
-            CfgAtom::Flag(flag) => matches!(&**flag, "unix" | "windows"),
-            CfgAtom::KeyValue { key, value: _ } => matches!(
-                &**key,
-                "target_arch"
-                    | "target_os"
-                    | "target_env"
-                    | "target_family"
-                    | "target_endian"
-                    | "target_pointer_width"
-                    | "target_vendor" // NOTE: `target_feature` is left out since it can be configured via `-Ctarget-feature`
-            ),
-        }
-    }
-}
-
 impl fmt::Display for CfgAtom {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
@@ -131,11 +131,9 @@ impl CfgDiff {
     /// of both.
     pub fn new(enable: Vec<CfgAtom>, disable: Vec<CfgAtom>) -> Option<CfgDiff> {
         let mut occupied = FxHashSet::default();
-        for item in enable.iter().chain(disable.iter()) {
-            if !occupied.insert(item) {
-                // was present
-                return None;
-            }
+        if enable.iter().chain(disable.iter()).any(|item| !occupied.insert(item)) {
+            // was present
+            return None;
         }
 
         Some(CfgDiff { enable, disable })
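
The rewritten duplicate check leans on the fact that a hash set's insert returns false when the value is already present, so `any` short-circuits on the first duplicate. A self-contained sketch of the same idiom using std's HashSet instead of the FxHashSet in the diff (the function itself is hypothetical):

    use std::collections::HashSet;

    // Returns None when `enable` and `disable` share or repeat an entry,
    // mirroring the rewritten check in CfgDiff::new.
    fn disjoint_diff<'a>(
        enable: Vec<&'a str>,
        disable: Vec<&'a str>,
    ) -> Option<(Vec<&'a str>, Vec<&'a str>)> {
        let mut occupied = HashSet::new();
        // `insert` returns false for an already-present value, so `any`
        // stops at the first duplicate across both lists.
        if enable.iter().chain(disable.iter()).any(|item| !occupied.insert(*item)) {
            return None;
        }
        Some((enable, disable))
    }

    fn main() {
        assert!(disjoint_diff(vec!["unix"], vec!["windows"]).is_some());
        assert!(disjoint_diff(vec!["unix"], vec!["unix"]).is_none());
    }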
@@ -493,7 +493,9 @@ impl CargoActor {
             // Skip certain kinds of messages to only spend time on what's useful
             JsonMessage::Cargo(message) => match message {
                 cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
-                    self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
+                    self.sender
+                        .send(CargoMessage::CompilerArtifact(Box::new(artifact)))
+                        .unwrap();
                 }
                 cargo_metadata::Message::CompilerMessage(msg) => {
                     self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
@@ -538,7 +540,7 @@ impl CargoActor {
 }
 
 enum CargoMessage {
-    CompilerArtifact(cargo_metadata::Artifact),
+    CompilerArtifact(Box<cargo_metadata::Artifact>),
     Diagnostic(Diagnostic),
 }
 
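
Boxing the payload here is about enum size: a Rust enum is as large as its largest variant, so one bulky variant (cargo_metadata::Artifact) inflates every CargoMessage sitting in the channel; this is what clippy's large_enum_variant lint flags. A minimal sketch with a stand-in payload type (the sizes are illustrative):

    // Stand-in for a large payload such as cargo_metadata::Artifact.
    struct Artifact {
        _data: [u64; 32],
    }

    enum Unboxed {
        CompilerArtifact(Artifact),
        Diagnostic(u32),
    }

    enum Boxed {
        // The boxed variant holds only a pointer, so the enum stays small
        // no matter how big the payload grows.
        CompilerArtifact(Box<Artifact>),
        Diagnostic(u32),
    }

    fn main() {
        // Every Unboxed value reserves space for its largest variant;
        // Boxed values stay pointer-sized plus the discriminant.
        println!("unboxed: {} bytes", std::mem::size_of::<Unboxed>());
        println!("boxed:   {} bytes", std::mem::size_of::<Boxed>());
    }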
@@ -32,6 +32,7 @@ use crate::{
     VariantId,
 };
 
+/// Desugared attributes of an item post `cfg_attr` expansion.
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct Attrs(RawAttrs);
 
@@ -74,7 +75,7 @@ impl Attrs {
         db: &dyn DefDatabase,
         v: VariantId,
     ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
-        let _p = profile::span("fields_attrs_query");
+        let _p = tracing::span!(tracing::Level::INFO, "fields_attrs_query").entered();
         // FIXME: There should be some proper form of mapping between item tree field ids and hir field ids
         let mut res = ArenaMap::default();
 
@@ -228,7 +229,6 @@ pub enum DocAtom {
     KeyValue { key: SmolStr, value: SmolStr },
 }
 
-// Adapted from `CfgExpr` parsing code
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum DocExpr {
     Invalid,
@@ -322,7 +322,7 @@ impl AttrsWithOwner {
     }
 
     pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
-        let _p = profile::span("attrs_query");
+        let _p = tracing::span!(tracing::Level::INFO, "attrs_query").entered();
         // FIXME: this should use `Trace` to avoid duplication in `source_map` below
         let raw_attrs = match def {
             AttrDefId::ModuleId(module) => {
@@ -448,10 +448,7 @@ impl AttrsWithOwner {
                 let map = db.fields_attrs_source_map(id.parent);
                 let file_id = id.parent.file_id(db);
                 let root = db.parse_or_expand(file_id);
-                let owner = match &map[id.local_id] {
-                    Either::Left(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
-                    Either::Right(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
-                };
+                let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root));
                 InFile::new(file_id, owner)
             }
             AttrDefId::AdtId(adt) => match adt {
@@ -634,7 +631,7 @@ fn attrs_from_item_tree_assoc<'db, N: ItemTreeModItemNode>(
 pub(crate) fn fields_attrs_source_map(
     db: &dyn DefDatabase,
     def: VariantId,
-) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>> {
+) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>> {
     let mut res = ArenaMap::default();
     let child_source = def.child_source(db);
 
@@ -643,7 +640,7 @@ pub(crate) fn fields_attrs_source_map(
             idx,
             variant
                 .as_ref()
-                .either(|l| Either::Left(AstPtr::new(l)), |r| Either::Right(AstPtr::new(r))),
+                .either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()),
         );
     }
 
@@ -1,10 +1,10 @@
 //! Defines `Body`: a lowered representation of bodies of functions, statics and
 //! consts.
 mod lower;
+mod pretty;
+pub mod scope;
 #[cfg(test)]
 mod tests;
-pub mod scope;
-mod pretty;
 
 use std::ops::Index;
 
@@ -122,7 +122,7 @@ impl Body {
         db: &dyn DefDatabase,
         def: DefWithBodyId,
     ) -> (Arc<Body>, Arc<BodySourceMap>) {
-        let _p = profile::span("body_with_source_map_query");
+        let _p = tracing::span!(tracing::Level::INFO, "body_with_source_map_query").entered();
         let mut params = None;
 
         let mut is_async_fn = false;
@@ -1335,6 +1335,7 @@ impl ExprCollector<'_> {
         let args = record_pat_field_list
             .fields()
             .filter_map(|f| {
+                self.check_cfg(&f)?;
                 let ast_pat = f.pat()?;
                 let pat = self.collect_pat(ast_pat, binding_list);
                 let name = f.field_name()?.as_name();
@@ -33,7 +33,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
                 }
             )
         }),
-        DefWithBodyId::InTypeConstId(_) => format!("In type const = "),
+        DefWithBodyId::InTypeConstId(_) => "In type const = ".to_string(),
         DefWithBodyId::VariantId(it) => {
             let loc = it.lookup(db);
             let enum_loc = loc.parent.lookup(db);
@@ -256,7 +256,7 @@ impl SsrError {
 "##,
     );
 
-    assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
+    assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]);
     expect![[r#"
 fn main() {
     _ = $crate::error::SsrError::new(
@@ -309,7 +309,7 @@ fn f() {
 "#,
     );
 
-    let (_, source_map) = db.body_with_source_map(def.into());
+    let (_, source_map) = db.body_with_source_map(def);
     assert_eq!(source_map.diagnostics(), &[]);
 
     for (_, def_map) in body.blocks(&db) {
@@ -340,7 +340,7 @@ impl ImplData {
         db: &dyn DefDatabase,
         id: ImplId,
     ) -> (Arc<ImplData>, DefDiagnostics) {
-        let _p = profile::span("impl_data_with_diagnostics_query");
+        let _p = tracing::span!(tracing::Level::INFO, "impl_data_with_diagnostics_query").entered();
         let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
 
         let item_tree = tree_id.item_tree(db);
@@ -782,7 +782,7 @@ impl<'a> AssocItemCollector<'a> {
                 self.diagnostics.push(DefDiagnostic::macro_expansion_parse_error(
                     self.module_id.local_id,
                     error_call_kind(),
-                    errors.into(),
+                    errors,
                 ));
             }
 
@@ -1,9 +1,10 @@
 //! Defines database & queries for name resolution.
-use base_db::{salsa, CrateId, SourceDatabase, Upcast};
+use base_db::{salsa, CrateId, FileId, SourceDatabase, Upcast};
 use either::Either;
 use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId};
 use intern::Interned;
 use la_arena::ArenaMap;
+use span::MacroCallId;
 use syntax::{ast, AstPtr};
 use triomphe::Arc;
 
@@ -194,7 +195,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
     fn fields_attrs_source_map(
         &self,
         def: VariantId,
-    ) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>>;
+    ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
 
     #[salsa::invoke(AttrsWithOwner::attrs_query)]
     fn attrs(&self, def: AttrDefId) -> Attrs;
@@ -234,10 +235,26 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
     fn crate_notable_traits(&self, krate: CrateId) -> Option<Arc<[TraitId]>>;
 
     fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
+
+    fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, FileId)>;
+}
+
+// return: macro call id and include file id
+fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId, FileId)> {
+    db.crate_def_map(krate)
+        .modules
+        .values()
+        .flat_map(|m| m.scope.iter_macro_invoc())
+        .filter_map(|invoc| {
+            db.lookup_intern_macro_call(*invoc.1)
+                .include_file_id(db.upcast(), *invoc.1)
+                .map(|x| (*invoc.1, x))
+        })
+        .collect()
 }
 
 fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
-    let _p = profile::span("crate_def_map:wait");
+    let _p = tracing::span!(tracing::Level::INFO, "crate_def_map:wait").entered();
     db.crate_def_map_query(krate)
 }
 
@@ -13,7 +13,7 @@ use crate::{
     item_scope::ItemInNs,
     nameres::DefMap,
     path::{ModPath, PathKind},
-    visibility::{Visibility, VisibilityExplicity},
+    visibility::{Visibility, VisibilityExplicitness},
     CrateRootModuleId, ModuleDefId, ModuleId,
 };
 
@@ -26,7 +26,7 @@ pub fn find_path(
     prefer_no_std: bool,
     prefer_prelude: bool,
 ) -> Option<ModPath> {
-    let _p = profile::span("find_path");
+    let _p = tracing::span!(tracing::Level::INFO, "find_path").entered();
     find_path_inner(FindPathCtx { db, prefixed: None, prefer_no_std, prefer_prelude }, item, from)
 }
 
@@ -38,7 +38,7 @@ pub fn find_path_prefixed(
     prefer_no_std: bool,
     prefer_prelude: bool,
 ) -> Option<ModPath> {
-    let _p = profile::span("find_path_prefixed");
+    let _p = tracing::span!(tracing::Level::INFO, "find_path_prefixed").entered();
     find_path_inner(
         FindPathCtx { db, prefixed: Some(prefix_kind), prefer_no_std, prefer_prelude },
         item,
@@ -497,7 +497,7 @@ fn find_local_import_locations(
     item: ItemInNs,
     from: ModuleId,
 ) -> Vec<(ModuleId, Name)> {
-    let _p = profile::span("find_local_import_locations");
+    let _p = tracing::span!(tracing::Level::INFO, "find_local_import_locations").entered();
 
     // `from` can import anything below `from` with visibility of at least `from`, and anything
     // above `from` with any visibility. That means we do not need to descend into private siblings
@@ -544,11 +544,11 @@ fn find_local_import_locations(
         if let Some((name, vis, declared)) = data.scope.name_of(item) {
             if vis.is_visible_from(db, from) {
                 let is_pub_or_explicit = match vis {
-                    Visibility::Module(_, VisibilityExplicity::Explicit) => {
+                    Visibility::Module(_, VisibilityExplicitness::Explicit) => {
                         cov_mark::hit!(explicit_private_imports);
                         true
                     }
-                    Visibility::Module(_, VisibilityExplicity::Implicit) => {
+                    Visibility::Module(_, VisibilityExplicitness::Implicit) => {
                         cov_mark::hit!(discount_private_imports);
                         false
                     }
@@ -373,7 +373,7 @@ impl GenericParams {
         db: &dyn DefDatabase,
         def: GenericDefId,
     ) -> Interned<GenericParams> {
-        let _p = profile::span("generic_params_query");
+        let _p = tracing::span!(tracing::Level::INFO, "generic_params_query").entered();
 
         let krate = def.module(db).krate;
         let cfg_options = db.crate_graph();
@@ -12,8 +12,8 @@
 //!
 //! See also a neighboring `body` module.
 
-pub mod type_ref;
 pub mod format_args;
+pub mod type_ref;
 
 use std::fmt;
 
@@ -166,6 +166,7 @@ enum PositionUsedAs {
 }
 use PositionUsedAs::*;
 
+#[allow(clippy::unnecessary_lazy_evaluations)]
 pub(crate) fn parse(
     s: &ast::String,
     fmt_snippet: Option<String>,
@@ -177,9 +178,9 @@ pub(crate) fn parse(
     let text = s.text_without_quotes();
     let str_style = match s.quote_offsets() {
         Some(offsets) => {
-            let raw = u32::from(offsets.quotes.0.len()) - 1;
+            let raw = usize::from(offsets.quotes.0.len()) - 1;
             // subtract 1 for the `r` prefix
-            (raw != 0).then(|| raw as usize - 1)
+            (raw != 0).then(|| raw - 1)
         }
         None => None,
     };
@@ -214,7 +215,7 @@ pub(crate) fn parse(
 
     let mut used = vec![false; args.explicit_args().len()];
     let mut invalid_refs = Vec::new();
-    let mut numeric_refences_to_named_arg = Vec::new();
+    let mut numeric_references_to_named_arg = Vec::new();
 
     enum ArgRef<'a> {
         Index(usize),
@@ -231,7 +232,7 @@ pub(crate) fn parse(
             used[index] = true;
             if arg.kind.ident().is_some() {
                 // This was a named argument, but it was used as a positional argument.
-                numeric_refences_to_named_arg.push((index, span, used_as));
+                numeric_references_to_named_arg.push((index, span, used_as));
             }
             Ok(index)
         } else {
@@ -432,7 +433,7 @@ pub(crate) fn parse(
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
 pub struct FormatArgumentsCollector {
     arguments: Vec<FormatArgument>,
     num_unnamed_args: usize,
@@ -451,7 +452,7 @@ impl FormatArgumentsCollector {
     }
 
     pub fn new() -> Self {
-        Self { arguments: vec![], names: vec![], num_unnamed_args: 0, num_explicit_args: 0 }
+        Default::default()
    }
 
     pub fn add(&mut self, arg: FormatArgument) -> usize {
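
With Default added to the derive list, the hand-rolled constructor in the last hunk collapses to Default::default(), which stays correct if fields are added later. A sketch of the pattern (the field types are simplified stand-ins, not the real struct's):

    #[derive(Clone, Debug, Default, Eq, PartialEq)]
    struct FormatArgumentsCollector {
        arguments: Vec<String>,
        num_unnamed_args: usize,
        num_explicit_args: usize,
    }

    impl FormatArgumentsCollector {
        // Equivalent to spelling out every field with its empty/zero value,
        // but it no longer needs editing when the struct gains a field.
        fn new() -> Self {
            Default::default()
        }
    }

    fn main() {
        assert_eq!(FormatArgumentsCollector::new(), FormatArgumentsCollector::default());
    }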
@@ -75,7 +75,7 @@ impl ImportMap {
     }
 
     pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
-        let _p = profile::span("import_map_query");
+        let _p = tracing::span!(tracing::Level::INFO, "import_map_query").entered();
 
         let map = Self::collect_import_map(db, krate);
 
@@ -126,7 +126,7 @@ impl ImportMap {
     }
 
     fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
-        let _p = profile::span("collect_import_map");
+        let _p = tracing::span!(tracing::Level::INFO, "collect_import_map").entered();
 
         let def_map = db.crate_def_map(krate);
         let mut map = FxIndexMap::default();
@@ -216,7 +216,7 @@ impl ImportMap {
         is_type_in_ns: bool,
         trait_import_info: &ImportInfo,
     ) {
-        let _p = profile::span("collect_trait_assoc_items");
+        let _p = tracing::span!(tracing::Level::INFO, "collect_trait_assoc_items").entered();
         for &(ref assoc_item_name, item) in &db.trait_data(tr).items {
             let module_def_id = match item {
                 AssocItemId::FunctionId(f) => ModuleDefId::from(f),
@@ -297,7 +297,7 @@ impl SearchMode {
             SearchMode::Exact => candidate.eq_ignore_ascii_case(query),
             SearchMode::Prefix => {
                 query.len() <= candidate.len() && {
-                    let prefix = &candidate[..query.len() as usize];
+                    let prefix = &candidate[..query.len()];
                     if case_sensitive {
                         prefix == query
                     } else {
@@ -396,9 +396,9 @@ impl Query {
 pub fn search_dependencies(
     db: &dyn DefDatabase,
     krate: CrateId,
-    ref query: Query,
+    query: &Query,
 ) -> FxHashSet<ItemInNs> {
-    let _p = profile::span("search_dependencies").detail(|| format!("{query:?}"));
+    let _p = tracing::span!(tracing::Level::INFO, "search_dependencies", ?query).entered();
 
     let graph = db.crate_graph();
 
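
The signature change just above replaces a by-value parameter bound with a `ref` pattern by an ordinary reference, so callers keep ownership of the query; the test hunk below gains the matching `&`. A small sketch of the difference, using a hypothetical Query type:

    #[derive(Debug)]
    struct Query {
        text: String,
    }

    // Old shape: takes the Query by value and merely binds it by reference
    // inside the function body; the caller still loses ownership.
    fn search_by_value(ref query: Query) -> usize {
        query.text.len()
    }

    // New shape: borrows the Query, so the caller can reuse it afterwards.
    fn search_by_ref(query: &Query) -> usize {
        query.text.len()
    }

    fn main() {
        let q = Query { text: "Deref".to_string() };
        assert_eq!(search_by_ref(&q), 5);
        // `q` is still live here; with search_by_value it would be moved:
        assert_eq!(search_by_value(q), 5);
    }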
@@ -446,7 +446,7 @@ fn search_maps(
         let end = (value & 0xFFFF_FFFF) as usize;
         let start = (value >> 32) as usize;
         let ImportMap { item_to_info_map, importables, .. } = &*import_maps[import_map_idx];
-        let importables = &importables[start as usize..end];
+        let importables = &importables[start..end];
 
         let iter = importables
             .iter()
@@ -516,7 +516,7 @@ mod tests {
         })
         .expect("could not find crate");
 
-        let actual = search_dependencies(db.upcast(), krate, query)
+        let actual = search_dependencies(db.upcast(), krate, &query)
             .into_iter()
             .filter_map(|dependency| {
                 let dependency_krate = dependency.krate(db.upcast())?;
@@ -17,7 +17,7 @@ use syntax::ast;
 use crate::{
     db::DefDatabase,
     per_ns::PerNs,
-    visibility::{Visibility, VisibilityExplicity},
+    visibility::{Visibility, VisibilityExplicitness},
     AdtId, BuiltinType, ConstId, ExternCrateId, HasModule, ImplId, LocalModuleId, Lookup, MacroId,
     ModuleDefId, ModuleId, TraitId, UseId,
 };
@@ -336,6 +336,12 @@ impl ItemScope {
     pub(crate) fn macro_invoc(&self, call: AstId<ast::MacroCall>) -> Option<MacroCallId> {
         self.macro_invocations.get(&call).copied()
     }
+
+    pub(crate) fn iter_macro_invoc(
+        &self,
+    ) -> impl Iterator<Item = (&AstId<ast::MacroCall>, &MacroCallId)> {
+        self.macro_invocations.iter()
+    }
 }
 
 impl ItemScope {
@@ -647,14 +653,16 @@ impl ItemScope {
             .map(|(_, vis, _)| vis)
             .chain(self.values.values_mut().map(|(_, vis, _)| vis))
             .chain(self.unnamed_trait_imports.values_mut().map(|(vis, _)| vis))
-            .for_each(|vis| *vis = Visibility::Module(this_module, VisibilityExplicity::Implicit));
+            .for_each(|vis| {
+                *vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit)
+            });
 
         for (mac, vis, import) in self.macros.values_mut() {
             if matches!(mac, MacroId::ProcMacroId(_) if import.is_none()) {
                 continue;
             }
 
-            *vis = Visibility::Module(this_module, VisibilityExplicity::Implicit);
+            *vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit);
         }
     }
 
@ -69,7 +69,7 @@ use crate::{
|
||||||
generics::{GenericParams, LifetimeParamData, TypeOrConstParamData},
|
generics::{GenericParams, LifetimeParamData, TypeOrConstParamData},
|
||||||
path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind},
|
path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind},
|
||||||
type_ref::{Mutability, TraitRef, TypeBound, TypeRef},
|
type_ref::{Mutability, TraitRef, TypeBound, TypeRef},
|
||||||
visibility::{RawVisibility, VisibilityExplicity},
|
visibility::{RawVisibility, VisibilityExplicitness},
|
||||||
BlockId, Lookup,
|
BlockId, Lookup,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -109,7 +109,8 @@ pub struct ItemTree {
|
||||||
|
|
||||||
impl ItemTree {
|
impl ItemTree {
|
||||||
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
|
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
|
||||||
let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}"));
|
let _p = tracing::span!(tracing::Level::INFO, "file_item_tree_query", ?file_id).entered();
|
||||||
|
|
||||||
let syntax = db.parse_or_expand(file_id);
|
let syntax = db.parse_or_expand(file_id);
|
||||||
|
|
||||||
let ctx = lower::Ctx::new(db, file_id);
|
let ctx = lower::Ctx::new(db, file_id);
|
||||||
@@ -252,10 +253,10 @@ impl ItemVisibilities {
             RawVisibility::Public => RawVisibilityId::PUB,
             RawVisibility::Module(path, explicitiy) if path.segments().is_empty() => {
                 match (&path.kind, explicitiy) {
-                    (PathKind::Super(0), VisibilityExplicity::Explicit) => {
+                    (PathKind::Super(0), VisibilityExplicitness::Explicit) => {
                         RawVisibilityId::PRIV_EXPLICIT
                     }
-                    (PathKind::Super(0), VisibilityExplicity::Implicit) => {
+                    (PathKind::Super(0), VisibilityExplicitness::Implicit) => {
                         RawVisibilityId::PRIV_IMPLICIT
                     }
                     (PathKind::Crate, _) => RawVisibilityId::PUB_CRATE,

@@ -269,11 +270,11 @@ impl ItemVisibilities {

 static VIS_PUB: RawVisibility = RawVisibility::Public;
 static VIS_PRIV_IMPLICIT: RawVisibility =
-    RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicity::Implicit);
+    RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicitness::Implicit);
 static VIS_PRIV_EXPLICIT: RawVisibility =
-    RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicity::Explicit);
+    RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicitness::Explicit);
 static VIS_PUB_CRATE: RawVisibility =
-    RawVisibility::Module(ModPath::from_kind(PathKind::Crate), VisibilityExplicity::Explicit);
+    RawVisibility::Module(ModPath::from_kind(PathKind::Crate), VisibilityExplicitness::Explicit);

 #[derive(Default, Debug, Eq, PartialEq)]
 struct ItemTreeData {
@@ -91,7 +91,7 @@ impl LangItems {
         db: &dyn DefDatabase,
         krate: CrateId,
     ) -> Option<Arc<LangItems>> {
-        let _p = profile::span("crate_lang_items_query");
+        let _p = tracing::span!(tracing::Level::INFO, "crate_lang_items_query").entered();

         let mut lang_items = LangItems::default();

@@ -163,7 +163,7 @@ impl LangItems {
         start_crate: CrateId,
         item: LangItem,
     ) -> Option<LangItemTarget> {
-        let _p = profile::span("lang_item_query");
+        let _p = tracing::span!(tracing::Level::INFO, "lang_item_query").entered();
         if let Some(target) =
             db.crate_lang_items(start_crate).and_then(|it| it.items.get(&item).copied())
         {

@@ -183,7 +183,7 @@ impl LangItems {
     ) where
         T: Into<AttrDefId> + Copy,
     {
-        let _p = profile::span("collect_lang_item");
+        let _p = tracing::span!(tracing::Level::INFO, "collect_lang_item").entered();
         if let Some(lang_item) = lang_attr(db, item.into()) {
             self.items.entry(lang_item).or_insert_with(|| constructor(item));
         }

@@ -199,7 +199,7 @@ pub(crate) fn notable_traits_in_deps(
     db: &dyn DefDatabase,
     krate: CrateId,
 ) -> Arc<[Arc<[TraitId]>]> {
-    let _p = profile::span("notable_traits_in_deps").detail(|| format!("{krate:?}"));
+    let _p = tracing::span!(tracing::Level::INFO, "notable_traits_in_deps", ?krate).entered();
     let crate_graph = db.crate_graph();

     Arc::from_iter(

@@ -208,7 +208,7 @@ pub(crate) fn notable_traits_in_deps(
 }

 pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Option<Arc<[TraitId]>> {
-    let _p = profile::span("crate_notable_traits").detail(|| format!("{krate:?}"));
+    let _p = tracing::span!(tracing::Level::INFO, "crate_notable_traits", ?krate).entered();

     let mut traits = Vec::new();
@@ -25,13 +25,13 @@ extern crate ra_ap_rustc_abi as rustc_abi;
 pub mod db;

 pub mod attr;
-pub mod path;
 pub mod builtin_type;
-pub mod per_ns;
 pub mod item_scope;
+pub mod path;
+pub mod per_ns;

-pub mod lower;
 pub mod expander;
+pub mod lower;

 pub mod dyn_map;

@@ -46,24 +46,24 @@ pub use self::hir::type_ref;
 pub mod body;
 pub mod resolver;

-mod trace;
 pub mod nameres;
+mod trace;

-pub mod src;
 pub mod child_by_source;
+pub mod src;

-pub mod visibility;
 pub mod find_path;
 pub mod import_map;
+pub mod visibility;

 pub use rustc_abi as layout;
 use triomphe::Arc;

-#[cfg(test)]
-mod test_db;
 #[cfg(test)]
 mod macro_expansion_tests;
 mod pretty;
+#[cfg(test)]
+mod test_db;

 use std::{
     hash::{Hash, Hasher},
@@ -73,7 +73,6 @@ use std::{
 use base_db::{impl_intern_key, salsa, CrateId, Edition};
 use hir_expand::{
     ast_id_map::{AstIdNode, FileAstId},
-    attrs::{Attr, AttrId, AttrInput},
     builtin_attr_macro::BuiltinAttrExpander,
     builtin_derive_macro::BuiltinDeriveExpander,
     builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
@@ -746,7 +745,7 @@ impl InTypeConstId {
     }
 }

-/// A constant, which might appears as a const item, an annonymous const block in expressions
+/// A constant, which might appears as a const item, an anonymous const block in expressions
 /// or patterns, or as a constant in types with const generics.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum GeneralConstId {
@@ -939,6 +938,15 @@ impl From<AssocItemId> for AttrDefId {
         }
     }
 }
+impl From<VariantId> for AttrDefId {
+    fn from(vid: VariantId) -> Self {
+        match vid {
+            VariantId::EnumVariantId(id) => id.into(),
+            VariantId::StructId(id) => id.into(),
+            VariantId::UnionId(id) => id.into(),
+        }
+    }
+}

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum VariantId {
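Editor's note: the new From impl relies on each inner id already converting into AttrDefId, so the outer impl only matches and forwards. A toy sketch of that delegation pattern, with hypothetical types:

    struct Target(u32);

    enum Source {
        A(u32),
        B(u32),
    }

    impl From<u32> for Target {
        fn from(v: u32) -> Self {
            Target(v)
        }
    }

    impl From<Source> for Target {
        fn from(s: Source) -> Self {
            match s {
                // Each arm forwards to the existing conversion, as the diff does per variant.
                Source::A(id) | Source::B(id) => id.into(),
            }
        }
    }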
@@ -1265,60 +1273,6 @@ fn macro_call_as_call_id_with_eager(
     Ok(res)
 }

-fn derive_macro_as_call_id(
-    db: &dyn DefDatabase,
-    item_attr: &AstIdWithPath<ast::Adt>,
-    derive_attr_index: AttrId,
-    derive_pos: u32,
-    call_site: Span,
-    krate: CrateId,
-    resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
-) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
-    let (macro_id, def_id) = resolver(item_attr.path.clone())
-        .filter(|(_, def_id)| def_id.is_derive())
-        .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
-    let call_id = def_id.as_lazy_macro(
-        db.upcast(),
-        krate,
-        MacroCallKind::Derive {
-            ast_id: item_attr.ast_id,
-            derive_index: derive_pos,
-            derive_attr_index,
-        },
-        call_site,
-    );
-    Ok((macro_id, def_id, call_id))
-}
-
-fn attr_macro_as_call_id(
-    db: &dyn DefDatabase,
-    item_attr: &AstIdWithPath<ast::Item>,
-    macro_attr: &Attr,
-    krate: CrateId,
-    def: MacroDefId,
-) -> MacroCallId {
-    let arg = match macro_attr.input.as_deref() {
-        Some(AttrInput::TokenTree(tt)) => {
-            let mut tt = tt.as_ref().clone();
-            tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
-            Some(tt)
-        }
-
-        _ => None,
-    };
-
-    def.as_lazy_macro(
-        db.upcast(),
-        krate,
-        MacroCallKind::Attr {
-            ast_id: item_attr.ast_id,
-            attr_args: arg.map(Arc::new),
-            invoc_attr_index: macro_attr.id,
-        },
-        macro_attr.span,
-    )
-}
-
 #[derive(Debug)]
 pub struct UnresolvedMacro {
     pub path: hir_expand::mod_path::ModPath,
@@ -460,13 +460,13 @@ fn test_concat_expand() {
 #[rustc_builtin_macro]
 macro_rules! concat {}

-fn main() { concat!("foo", "r", 0, r#"bar"#, "\n", false, '"', '\0'); }
+fn main() { concat!("fo", "o", 0, r#"bar"#, "\n", false, '"', '\0'); }
 "##,
         expect![[r##"
 #[rustc_builtin_macro]
 macro_rules! concat {}

-fn main() { "foor0bar\nfalse\"\u{0}"; }
+fn main() { "foo0bar\nfalse\"\u{0}"; }
 "##]],
     );
 }
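Editor's note: the expectation fix above matches what rustc's concat! actually produces: each literal is stringified and joined, so splitting "foo" into "fo" and "o" still yields "foo". Verifiable with plain Rust:

    fn main() {
        assert_eq!(concat!("fo", "o", 0), "foo0");
    }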
@@ -1,11 +1,11 @@
 //! Tests specific to declarative macros, aka macros by example. This covers
 //! both stable `macro_rules!` macros as well as unstable `macro` macros.

-mod tt_conversion;
 mod matching;
 mod meta_syntax;
 mod metavar_expr;
 mod regression;
+mod tt_conversion;

 use expect_test::expect;
@@ -72,7 +72,7 @@ fn main() {
 }
 "#]],
     );
-    // FIXME we should ahev testing infra for multi level expansion tests
+    // FIXME we should have testing infra for multi level expansion tests
     check(
         r#"
 macro_rules! __rust_force_expr {
@@ -544,11 +544,11 @@ fn test_proptest_arbitrary() {
     check(
         r#"
 macro_rules! arbitrary {
-    ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty;
+    ([$($bounds : tt)*] $typ: ty, $strategy: ty, $params: ty;
         $args: ident => $logic: expr) => {
         impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ {
             type Parameters = $params;
-            type Strategy = $strat;
+            type Strategy = $strategy;
             fn arbitrary_with($args: Self::Parameters) -> Self::Strategy {
                 $logic
             }

@@ -569,11 +569,11 @@ arbitrary!(
 "#,
         expect![[r#"
 macro_rules! arbitrary {
-    ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty;
+    ([$($bounds : tt)*] $typ: ty, $strategy: ty, $params: ty;
         $args: ident => $logic: expr) => {
         impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ {
             type Parameters = $params;
-            type Strategy = $strat;
+            type Strategy = $strategy;
             fn arbitrary_with($args: Self::Parameters) -> Self::Strategy {
                 $logic
             }
@@ -9,9 +9,9 @@
 //! write unit-tests (in fact, we used to do that), but that makes tests brittle
 //! and harder to understand.

-mod mbe;
-mod builtin_fn_macro;
 mod builtin_derive_macro;
+mod builtin_fn_macro;
+mod mbe;
 mod proc_macros;

 use std::{iter, ops::Range, sync};

@@ -25,7 +25,7 @@ use hir_expand::{
     InFile, MacroFileId, MacroFileIdExt,
 };
 use span::Span;
-use stdx::format_to;
+use stdx::{format_to, format_to_acc};
 use syntax::{
     ast::{self, edit::IndentLevel},
     AstNode,
@@ -149,8 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         if tree {
             let tree = format!("{:#?}", parse.syntax_node())
                 .split_inclusive('\n')
-                .map(|line| format!("// {line}"))
-                .collect::<String>();
+                .fold(String::new(), |mut acc, line| format_to_acc!(acc, "// {line}"));
             format_to!(expn_text, "\n{}", tree)
         }
         let range = call.syntax().text_range();
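Editor's note: format_to_acc! is rust-analyzer's stdx helper; the gain over map(format!).collect() is reusing one accumulator String instead of allocating a temporary per line. A standard-library-only sketch of the same fold:

    use std::fmt::Write;

    fn comment_out(s: &str) -> String {
        s.split_inclusive('\n').fold(String::new(), |mut acc, line| {
            // Writing into the accumulator avoids a per-line String allocation.
            let _ = write!(acc, "// {line}");
            acc
        })
    }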
@@ -48,11 +48,11 @@
 //! the result

 pub mod attr_resolution;
-pub mod proc_macro;
-pub mod diagnostics;
 mod collector;
+pub mod diagnostics;
 mod mod_resolution;
 mod path_resolution;
+pub mod proc_macro;

 #[cfg(test)]
 mod tests;

@@ -79,7 +79,7 @@ use crate::{
     nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode},
     path::ModPath,
     per_ns::PerNs,
-    visibility::{Visibility, VisibilityExplicity},
+    visibility::{Visibility, VisibilityExplicitness},
     AstId, BlockId, BlockLoc, CrateRootModuleId, EnumId, EnumVariantId, ExternCrateId, FunctionId,
     LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
 };
@@ -306,9 +306,10 @@ impl DefMap {
     pub const ROOT: LocalModuleId = LocalModuleId::from_raw(la_arena::RawIdx::from_u32(0));

     pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
-        let _p = profile::span("crate_def_map_query").detail(|| {
-            db.crate_graph()[krate].display_name.as_deref().unwrap_or_default().to_string()
-        });
+        let crate_graph = db.crate_graph();
+        let krate_name = crate_graph[krate].display_name.as_deref().unwrap_or_default();
+
+        let _p = tracing::span!(tracing::Level::INFO, "crate_def_map_query", ?krate_name).entered();

         let crate_graph = db.crate_graph();

@@ -335,7 +336,7 @@ impl DefMap {
             // this visibility for anything outside IDE, so that's probably OK.
             let visibility = Visibility::Module(
                 ModuleId { krate, local_id, block: None },
-                VisibilityExplicity::Implicit,
+                VisibilityExplicitness::Implicit,
             );
             let module_data = ModuleData::new(
                 ModuleOrigin::BlockExpr { block: block.ast_id, id: block_id },
@@ -1,16 +1,21 @@
 //! Post-nameres attribute resolution.

-use hir_expand::{attrs::Attr, MacroCallId};
+use base_db::CrateId;
+use hir_expand::{
+    attrs::{Attr, AttrId, AttrInput},
+    MacroCallId, MacroCallKind, MacroDefId,
+};
+use span::Span;
 use syntax::{ast, SmolStr};
+use triomphe::Arc;

 use crate::{
     attr::builtin::{find_builtin_attr_idx, TOOL_MODULES},
-    attr_macro_as_call_id,
     db::DefDatabase,
     item_scope::BuiltinShadowMode,
     nameres::path_resolution::ResolveMode,
-    path::{ModPath, PathKind},
-    AstIdWithPath, LocalModuleId, UnresolvedMacro,
+    path::{self, ModPath, PathKind},
+    AstIdWithPath, LocalModuleId, MacroId, UnresolvedMacro,
 };

 use super::{DefMap, MacroSubNs};
@@ -93,3 +98,57 @@ impl DefMap {
         false
     }
 }
+
+pub(super) fn attr_macro_as_call_id(
+    db: &dyn DefDatabase,
+    item_attr: &AstIdWithPath<ast::Item>,
+    macro_attr: &Attr,
+    krate: CrateId,
+    def: MacroDefId,
+) -> MacroCallId {
+    let arg = match macro_attr.input.as_deref() {
+        Some(AttrInput::TokenTree(tt)) => {
+            let mut tt = tt.as_ref().clone();
+            tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
+            Some(tt)
+        }
+
+        _ => None,
+    };
+
+    def.as_lazy_macro(
+        db.upcast(),
+        krate,
+        MacroCallKind::Attr {
+            ast_id: item_attr.ast_id,
+            attr_args: arg.map(Arc::new),
+            invoc_attr_index: macro_attr.id,
+        },
+        macro_attr.span,
+    )
+}
+
+pub(super) fn derive_macro_as_call_id(
+    db: &dyn DefDatabase,
+    item_attr: &AstIdWithPath<ast::Adt>,
+    derive_attr_index: AttrId,
+    derive_pos: u32,
+    call_site: Span,
+    krate: CrateId,
+    resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
+) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
+    let (macro_id, def_id) = resolver(item_attr.path.clone())
+        .filter(|(_, def_id)| def_id.is_derive())
+        .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
+    let call_id = def_id.as_lazy_macro(
+        db.upcast(),
+        krate,
+        MacroCallKind::Derive {
+            ast_id: item_attr.ast_id,
+            derive_index: derive_pos,
+            derive_attr_index,
+        },
+        call_site,
+    );
+    Ok((macro_id, def_id, call_id))
+}
@@ -30,9 +30,7 @@ use triomphe::Arc;

 use crate::{
     attr::Attrs,
-    attr_macro_as_call_id,
     db::DefDatabase,
-    derive_macro_as_call_id,
     item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports},
     item_tree::{
         self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId,

@@ -40,6 +38,7 @@ use crate::{
     },
     macro_call_as_call_id, macro_call_as_call_id_with_eager,
     nameres::{
+        attr_resolution::{attr_macro_as_call_id, derive_macro_as_call_id},
         diagnostics::DefDiagnostic,
         mod_resolution::ModDir,
         path_resolution::ReachedFixedPoint,
@@ -274,7 +273,7 @@ struct DefCollector<'a> {

 impl DefCollector<'_> {
     fn seed_with_top_level(&mut self) {
-        let _p = profile::span("seed_with_top_level");
+        let _p = tracing::span!(tracing::Level::INFO, "seed_with_top_level").entered();

         let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
         let item_tree = self.db.file_item_tree(file_id.into());

@@ -402,7 +401,7 @@ impl DefCollector<'_> {
     }

     fn resolution_loop(&mut self) {
-        let _p = profile::span("DefCollector::resolution_loop");
+        let _p = tracing::span!(tracing::Level::INFO, "DefCollector::resolution_loop").entered();

         // main name resolution fixed-point loop.
         let mut i = 0;

@@ -411,7 +410,7 @@ impl DefCollector<'_> {
             self.db.unwind_if_cancelled();

             {
-                let _p = profile::span("resolve_imports loop");
+                let _p = tracing::span!(tracing::Level::INFO, "resolve_imports loop").entered();

                 'resolve_imports: loop {
                     if self.resolve_imports() == ReachedFixedPoint::Yes {

@@ -437,7 +436,7 @@ impl DefCollector<'_> {
     }

     fn collect(&mut self) {
-        let _p = profile::span("DefCollector::collect");
+        let _p = tracing::span!(tracing::Level::INFO, "DefCollector::collect").entered();

         self.resolution_loop();
@@ -793,8 +792,8 @@ impl DefCollector<'_> {
     }

     fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
-        let _p = profile::span("resolve_import")
-            .detail(|| format!("{}", import.path.display(self.db.upcast())));
+        let _p = tracing::span!(tracing::Level::INFO, "resolve_import", import_path = %import.path.display(self.db.upcast()))
+            .entered();
         tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
         match import.source {
             ImportSource::ExternCrate { .. } => {
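Editor's note: the span above records the import path as a structured field. In tracing's field syntax, %value captures with Display and ?value with Debug. A minimal sketch with a hypothetical path argument:

    fn resolve_import(path: &str) {
        let _p = tracing::span!(tracing::Level::INFO, "resolve_import", import_path = %path).entered();
        tracing::debug!(?path, "resolving import");
    }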
@@ -857,7 +856,7 @@ impl DefCollector<'_> {
     }

     fn record_resolved_import(&mut self, directive: &ImportDirective) {
-        let _p = profile::span("record_resolved_import");
+        let _p = tracing::span!(tracing::Level::INFO, "record_resolved_import").entered();

         let module_id = directive.module_id;
         let import = &directive.import;

@@ -1245,7 +1244,9 @@ impl DefCollector<'_> {
                     MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. }
                     if expander.is_derive()
                 ) {
-                    // Resolved to `#[derive]`
+                    // Resolved to `#[derive]`, we don't actually expand this attribute like
+                    // normal (as that would just be an identity expansion with extra output)
+                    // Instead we treat derive attributes special and apply them separately.

                     let item_tree = tree.item_tree(self.db);
                     let ast_adt_id: FileAstId<ast::Adt> = match *mod_item {

@@ -1284,7 +1285,8 @@ impl DefCollector<'_> {
                     }

                     // We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection.
-                    // This is just a trick to be able to resolve the input to derives as proper paths.
+                    // This is just a trick to be able to resolve the input to derives
+                    // as proper paths in `Semantics`.
                     // Check the comment in [`builtin_attr_macro`].
                     let call_id = attr_macro_as_call_id(
                         self.db,

@@ -1428,7 +1430,7 @@ impl DefCollector<'_> {
     fn finish(mut self) -> DefMap {
         // Emit diagnostics for all remaining unexpanded macros.

-        let _p = profile::span("DefCollector::finish");
+        let _p = tracing::span!(tracing::Level::INFO, "DefCollector::finish").entered();

         for directive in &self.unresolved_macros {
             match &directive.kind {

@@ -1922,7 +1924,7 @@ impl ModCollector<'_, '_> {
                     item_tree: self.item_tree,
                     mod_dir,
                 }
-                .collect_in_top_module(&*items);
+                .collect_in_top_module(items);
                 if is_macro_use {
                     self.import_all_legacy_macros(module_id);
                 }
@@ -87,7 +87,7 @@ impl DefMap {
         within_impl: bool,
     ) -> Option<Visibility> {
         let mut vis = match visibility {
-            RawVisibility::Module(path, explicity) => {
+            RawVisibility::Module(path, explicitness) => {
                 let (result, remaining) =
                     self.resolve_path(db, original_module, path, BuiltinShadowMode::Module, None);
                 if remaining.is_some() {

@@ -95,7 +95,7 @@ impl DefMap {
                 }
                 let types = result.take_types()?;
                 match types {
-                    ModuleDefId::ModuleId(m) => Visibility::Module(m, *explicity),
+                    ModuleDefId::ModuleId(m) => Visibility::Module(m, *explicitness),
                     // error: visibility needs to refer to module
                     _ => {
                         return None;

@@ -269,7 +269,7 @@ impl DefMap {
         stdx::never!(module.is_block_module());

         if self.block != def_map.block {
-            // If we have a different `DefMap` from `self` (the orignal `DefMap` we started
+            // If we have a different `DefMap` from `self` (the original `DefMap` we started
             // with), resolve the remaining path segments in that `DefMap`.
             let path =
                 ModPath::from_segments(PathKind::Super(0), path.segments().iter().cloned());

@@ -475,7 +475,7 @@ impl DefMap {
         let macro_use_prelude = || {
             self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, _extern_crate)| {
                 PerNs::macros(
-                    it.into(),
+                    it,
                     Visibility::Public,
                     // FIXME?
                     None, // extern_crate.map(ImportOrExternCrate::ExternCrate),

@@ -540,7 +540,7 @@ impl DefMap {
     }
 }

-/// Given a block module, returns its nearest non-block module and the `DefMap` it blongs to.
+/// Given a block module, returns its nearest non-block module and the `DefMap` it belongs to.
 fn adjust_to_nearest_non_block_module(
     db: &dyn DefDatabase,
     def_map: &DefMap,
@@ -16,19 +16,13 @@ pub enum Namespace {
     Macros,
 }

-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
 pub struct PerNs {
     pub types: Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
     pub values: Option<(ModuleDefId, Visibility, Option<ImportId>)>,
     pub macros: Option<(MacroId, Visibility, Option<ImportId>)>,
 }

-impl Default for PerNs {
-    fn default() -> Self {
-        PerNs { types: None, values: None, macros: None }
-    }
-}
-
 impl PerNs {
     pub fn none() -> PerNs {
         PerNs { types: None, values: None, macros: None }
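Editor's note: dropping the manual Default impl is safe here because every field is an Option, and the derived impl defaults each field, producing the same all-None value. A toy demonstration:

    #[derive(Debug, Default, PartialEq)]
    struct PerNsLike {
        types: Option<u32>,
        values: Option<u32>,
        macros: Option<u32>,
    }

    fn main() {
        assert_eq!(PerNsLike::default(), PerNsLike { types: None, values: None, macros: None });
    }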
@@ -92,7 +86,7 @@ impl PerNs {
     }

     pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs {
-        let _p = profile::span("PerNs::filter_visibility");
+        let _p = tracing::span!(tracing::Level::INFO, "PerNs::filter_visibility").entered();
         PerNs {
             types: self.types.filter(|&(_, v, _)| f(v)),
             values: self.values.filter(|&(_, v, _)| f(v)),
@@ -125,19 +119,17 @@ impl PerNs {
     }

     pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> {
-        let _p = profile::span("PerNs::iter_items");
+        let _p = tracing::span!(tracing::Level::INFO, "PerNs::iter_items").entered();
         self.types
             .map(|it| (ItemInNs::Types(it.0), it.2))
             .into_iter()
             .chain(
                 self.values
-                    .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import)))
-                    .into_iter(),
+                    .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import))),
             )
             .chain(
                 self.macros
-                    .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import)))
-                    .into_iter(),
+                    .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import))),
             )
     }
 }
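Editor's note: the .into_iter() calls could be dropped above because Iterator::chain accepts any IntoIterator, and Option<T> is one, yielding zero or one items. A plain-Rust demonstration:

    fn main() {
        let a = Some(1);
        let b: Option<i32> = None;
        let c = Some(3);
        let items: Vec<i32> = a.into_iter().chain(b).chain(c).collect();
        assert_eq!(items, [1, 3]);
    }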
@@ -41,13 +41,13 @@ impl Default for TestDB {

 impl Upcast<dyn ExpandDatabase> for TestDB {
     fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
-        &*self
+        self
     }
 }

 impl Upcast<dyn DefDatabase> for TestDB {
     fn upcast(&self) -> &(dyn DefDatabase + 'static) {
-        &*self
+        self
     }
 }
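Editor's note: &*self was a redundant reborrow; a &T already unsize-coerces to &dyn Trait in return position. A minimal sketch with toy types:

    trait Db {}
    struct TestDb;
    impl Db for TestDb {}

    fn upcast(db: &TestDb) -> &(dyn Db + 'static) {
        db // &TestDb coerces to &dyn Db here
    }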
@@ -20,14 +20,17 @@ use crate::{
 pub enum RawVisibility {
     /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is
     /// equivalent to `pub(self)`.
-    Module(ModPath, VisibilityExplicity),
+    Module(ModPath, VisibilityExplicitness),
     /// `pub`.
     Public,
 }

 impl RawVisibility {
     pub(crate) const fn private() -> RawVisibility {
-        RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicity::Implicit)
+        RawVisibility::Module(
+            ModPath::from_kind(PathKind::Super(0)),
+            VisibilityExplicitness::Implicit,
+        )
     }

     pub(crate) fn from_ast(

@@ -53,19 +56,19 @@ impl RawVisibility {
                     None => return RawVisibility::private(),
                     Some(path) => path,
                 };
-                RawVisibility::Module(path, VisibilityExplicity::Explicit)
+                RawVisibility::Module(path, VisibilityExplicitness::Explicit)
             }
             ast::VisibilityKind::PubCrate => {
                 let path = ModPath::from_kind(PathKind::Crate);
-                RawVisibility::Module(path, VisibilityExplicity::Explicit)
+                RawVisibility::Module(path, VisibilityExplicitness::Explicit)
             }
             ast::VisibilityKind::PubSuper => {
                 let path = ModPath::from_kind(PathKind::Super(1));
-                RawVisibility::Module(path, VisibilityExplicity::Explicit)
+                RawVisibility::Module(path, VisibilityExplicitness::Explicit)
             }
             ast::VisibilityKind::PubSelf => {
                 let path = ModPath::from_kind(PathKind::Super(0));
-                RawVisibility::Module(path, VisibilityExplicity::Explicit)
+                RawVisibility::Module(path, VisibilityExplicitness::Explicit)
             }
             ast::VisibilityKind::Pub => RawVisibility::Public,
         }

@@ -85,7 +88,7 @@ impl RawVisibility {
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub enum Visibility {
     /// Visibility is restricted to a certain module.
-    Module(ModuleId, VisibilityExplicity),
+    Module(ModuleId, VisibilityExplicitness),
     /// Visibility is unrestricted.
     Public,
 }

@@ -206,12 +209,12 @@ impl Visibility {

 /// Whether the item was imported through `pub(crate) use` or just `use`.
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub enum VisibilityExplicity {
+pub enum VisibilityExplicitness {
     Explicit,
     Implicit,
 }

-impl VisibilityExplicity {
+impl VisibilityExplicitness {
     pub fn is_explicit(&self) -> bool {
         matches!(self, Self::Explicit)
     }
@@ -155,7 +155,14 @@ impl PartialEq for AstIdMap {
 impl Eq for AstIdMap {}

 impl AstIdMap {
-    pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
+    pub(crate) fn ast_id_map(
+        db: &dyn ExpandDatabase,
+        file_id: span::HirFileId,
+    ) -> triomphe::Arc<AstIdMap> {
+        triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
+    }
+
+    fn from_source(node: &SyntaxNode) -> AstIdMap {
         assert!(node.parent().is_none());
         let mut res = AstIdMap::default();
@@ -117,14 +117,10 @@ impl RawAttrs {
             None => return smallvec![attr.clone()],
         };
         let index = attr.id;
-        let attrs =
-            parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
-                let tree = Subtree {
-                    delimiter: tt::Delimiter::invisible_spanned(attr.first()?.first_span()),
-                    token_trees: attr.to_vec(),
-                };
-                Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
-            });
+        let attrs = parts
+            .enumerate()
+            .take(1 << AttrId::CFG_ATTR_BITS)
+            .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));

         let cfg_options = &crate_graph[krate].cfg_options;
         let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
@@ -222,12 +218,40 @@ impl Attr {
         Some(Attr { id, path, input, span })
     }

-    fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
-        // FIXME: Unecessary roundtrip tt -> ast -> tt
-        let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
-        let ast = ast::Meta::cast(parse.syntax_node())?;
-
-        Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
+    fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
+        let span = tt.first()?.first_span();
+        let path_end = tt
+            .iter()
+            .position(|tt| {
+                !matches!(
+                    tt,
+                    tt::TokenTree::Leaf(
+                        tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_),
+                    )
+                )
+            })
+            .unwrap_or(tt.len());
+
+        let (path, input) = tt.split_at(path_end);
+        let path = Interned::new(ModPath::from_tt(db, path)?);
+
+        let input = match input.first() {
+            Some(tt::TokenTree::Subtree(tree)) => {
+                Some(Interned::new(AttrInput::TokenTree(Box::new(tree.clone()))))
+            }
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))) => {
+                let input = match input.get(1) {
+                    Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text, .. }))) => {
+                        //FIXME the trimming here isn't quite right, raw strings are not handled
+                        Some(Interned::new(AttrInput::Literal(text.trim_matches('"').into())))
+                    }
+                    _ => None,
+                };
+                input
+            }
+            _ => None,
+        };
+        Some(Attr { id, path, input, span })
     }

     pub fn path(&self) -> &ModPath {

@@ -277,29 +301,8 @@ impl Attr {
             .token_trees
             .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
             .filter_map(move |tts| {
-                if tts.is_empty() {
-                    return None;
-                }
-                // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
-                // here or maybe just parse a mod path from a token tree directly
-                let subtree = tt::Subtree {
-                    delimiter: tt::Delimiter::invisible_spanned(tts.first()?.first_span()),
-                    token_trees: tts.to_vec(),
-                };
-                let (parse, span_map) =
-                    mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
-                let meta = ast::Meta::cast(parse.syntax_node())?;
-                // Only simple paths are allowed.
-                if meta.eq_token().is_some() || meta.expr().is_some() || meta.token_tree().is_some()
-                {
-                    return None;
-                }
-                let path = meta.path()?;
-                let call_site = span_map.span_at(path.syntax().text_range().start());
-                Some((
-                    ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
-                    call_site,
-                ))
+                let span = tts.first()?.first_span();
+                Some((ModPath::from_tt(db, tts)?, span))
             });

         Some(paths)
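Editor's note: both rewrites above avoid a token tree to syntax tree round trip by splitting the flat token list directly: the attribute path is the longest prefix of identifiers and path separators, and whatever follows is the input. A simplified sketch with a toy token type, not the real tt crate:

    #[derive(Debug, PartialEq)]
    enum Tok {
        Ident(&'static str),
        Colon,
        Eq,
    }

    fn split_attr(tokens: &[Tok]) -> (&[Tok], &[Tok]) {
        let path_end = tokens
            .iter()
            .position(|t| !matches!(t, Tok::Ident(_) | Tok::Colon))
            .unwrap_or(tokens.len());
        tokens.split_at(path_end)
    }

    fn main() {
        let toks = [Tok::Ident("doc"), Tok::Eq, Tok::Ident("hidden")];
        let (path, input) = split_attr(&toks);
        assert_eq!(path, &toks[..1]);
        assert_eq!(input, &toks[1..]);
    }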
@@ -48,11 +48,13 @@ impl BuiltinAttrExpander {

 register_builtin! { expand:
     (bench, Bench) => dummy_attr_expand,
+    (cfg, Cfg) => dummy_attr_expand,
+    (cfg_attr, CfgAttr) => dummy_attr_expand,
     (cfg_accessible, CfgAccessible) => dummy_attr_expand,
     (cfg_eval, CfgEval) => dummy_attr_expand,
-    (derive, Derive) => derive_attr_expand,
+    (derive, Derive) => derive_expand,
     // derive const is equivalent to derive for our proposes.
-    (derive_const, DeriveConst) => derive_attr_expand,
+    (derive_const, DeriveConst) => derive_expand,
     (global_allocator, GlobalAllocator) => dummy_attr_expand,
     (test, Test) => dummy_attr_expand,
     (test_case, TestCase) => dummy_attr_expand

@@ -91,7 +93,7 @@ fn dummy_attr_expand(
 /// always resolve as a derive without nameres recollecting them.
 /// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
 /// [`hir::Semantics`] to make this work.
-fn derive_attr_expand(
+fn derive_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
     tt: &tt::Subtree,
@@ -1,16 +1,11 @@
 //! Defines database & queries for macro expansion.

-use std::sync::OnceLock;
-
-use base_db::{
-    salsa::{self, debug::DebugQueryTable},
-    CrateId, Edition, FileId, SourceDatabase, VersionReq,
-};
+use base_db::{salsa, CrateId, FileId, SourceDatabase};
 use either::Either;
 use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
-use span::{Span, SyntaxContextId};
+use span::SyntaxContextId;
 use syntax::{
     ast::{self, HasAttrs},
     AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,

@@ -19,13 +14,14 @@ use triomphe::Arc;

 use crate::{
     ast_id_map::AstIdMap,
-    attrs::{collect_attrs, RawAttrs},
+    attrs::collect_attrs,
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
+    declarative::DeclarativeMacroExpander,
     fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
     hygiene::{
-        apply_mark, span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
-        SyntaxContextData, Transparency,
+        span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
+        SyntaxContextData,
     },
     proc_macro::ProcMacros,
     span_map::{RealSpanMap, SpanMap, SpanMapRef},
@@ -43,82 +39,6 @@ use crate::{
 /// Actual max for `analysis-stats .` at some point: 30672.
 static TOKEN_LIMIT: Limit = Limit::new(1_048_576);

-#[derive(Debug, Clone, Eq, PartialEq)]
-/// Old-style `macro_rules` or the new macros 2.0
-pub struct DeclarativeMacroExpander {
-    pub mac: mbe::DeclarativeMacro<span::Span>,
-    pub transparency: Transparency,
-}
-
-// FIXME: Remove this once we drop support for 1.76
-static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
-
-impl DeclarativeMacroExpander {
-    pub fn expand(
-        &self,
-        db: &dyn ExpandDatabase,
-        tt: tt::Subtree,
-        call_id: MacroCallId,
-    ) -> ExpandResult<tt::Subtree> {
-        let loc = db.lookup_intern_macro_call(call_id);
-        let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
-        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-                &base_db::Version {
-                    pre: base_db::Prerelease::EMPTY,
-                    build: base_db::BuildMetadata::EMPTY,
-                    major: version.major,
-                    minor: version.minor,
-                    patch: version.patch,
-                },
-            )
-        });
-        match self.mac.err() {
-            Some(e) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
-                ExpandError::other(format!("invalid macro definition: {e}")),
-            ),
-            None => self
-                .mac
-                .expand(
-                    &tt,
-                    |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
-                    new_meta_vars,
-                    loc.call_site,
-                )
-                .map_err(Into::into),
-        }
-    }
-
-    pub fn expand_unhygienic(
-        &self,
-        db: &dyn ExpandDatabase,
-        tt: tt::Subtree,
-        krate: CrateId,
-        call_site: Span,
-    ) -> ExpandResult<tt::Subtree> {
-        let toolchain = &db.crate_graph()[krate].toolchain;
-        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-                &base_db::Version {
-                    pre: base_db::Prerelease::EMPTY,
-                    build: base_db::BuildMetadata::EMPTY,
-                    major: version.major,
-                    minor: version.minor,
-                    patch: version.patch,
-                },
-            )
-        });
-        match self.mac.err() {
-            Some(e) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
-                ExpandError::other(format!("invalid macro definition: {e}")),
-            ),
-            None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
-        }
-    }
-}
-
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
     /// Old-style `macro_rules` or the new macros 2.0
@@ -141,6 +61,7 @@ pub trait ExpandDatabase: SourceDatabase {
     #[salsa::input]
     fn proc_macros(&self) -> Arc<ProcMacros>;

+    #[salsa::invoke(AstIdMap::ast_id_map)]
     fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

     /// Main public API -- parses a hir file, not caring whether it's a real

@@ -156,8 +77,13 @@ pub trait ExpandDatabase: SourceDatabase {
         macro_file: MacroFileId,
     ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
     #[salsa::transparent]
+    #[salsa::invoke(SpanMap::new)]
     fn span_map(&self, file_id: HirFileId) -> SpanMap;

+    #[salsa::transparent]
+    #[salsa::invoke(crate::span_map::expansion_span_map)]
+    fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
+    #[salsa::invoke(crate::span_map::real_span_map)]
     fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;

     /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the

@@ -173,6 +99,7 @@ pub trait ExpandDatabase: SourceDatabase {
     #[salsa::transparent]
     fn setup_syntax_context_root(&self) -> ();
     #[salsa::transparent]
+    #[salsa::invoke(crate::hygiene::dump_syntax_contexts)]
     fn dump_syntax_contexts(&self) -> String;

     /// Lowers syntactic macro call to a token tree representation. That's a firewall

@@ -184,8 +111,10 @@ pub trait ExpandDatabase: SourceDatabase {
     ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
     /// Fetches the expander for this macro.
     #[salsa::transparent]
+    #[salsa::invoke(TokenExpander::macro_expander)]
     fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
     /// Fetches (and compiles) the expander of this decl macro.
+    #[salsa::invoke(DeclarativeMacroExpander::expander)]
     fn decl_macro_expander(
         &self,
         def_crate: CrateId,
@@ -203,36 +132,6 @@ pub trait ExpandDatabase: SourceDatabase {
     ) -> ExpandResult<Box<[SyntaxError]>>;
 }

-#[inline]
-pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
-    match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
-        HirFileIdRepr::MacroFile(m) => {
-            SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
-        }
-    }
-}
-
-pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
-    use syntax::ast::HasModuleItem;
-    let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
-    let ast_id_map = db.ast_id_map(file_id.into());
-    let tree = db.parse(file_id).tree();
-    // FIXME: Descend into modules and other item containing items that are not annotated with attributes
-    // and allocate pairs for those as well. This gives us finer grained span anchors resulting in
-    // better incrementality
-    pairs.extend(
-        tree.items()
-            .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
-    );
-
-    Arc::new(RealSpanMap::from_file(
-        file_id,
-        pairs.into_boxed_slice(),
-        tree.syntax().text_range().end(),
-    ))
-}
-
 /// This expands the given macro call, but with different arguments. This is
 /// used for completion, where we want to see what 'would happen' if we insert a
 /// token. The `token_to_map` mapped down into the expansion, with the mapped
@@ -357,10 +256,6 @@ pub fn expand_speculative(
     Some((node.syntax_node(), token))
 }

-fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
-    Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
-}
-
 fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
     match file_id.repr() {
         HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),

@@ -388,7 +283,7 @@ fn parse_macro_expansion(
     db: &dyn ExpandDatabase,
     macro_file: MacroFileId,
 ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
-    let _p = profile::span("parse_macro_expansion");
+    let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered();
     let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
     let expand_to = loc.expand_to();
     let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
|
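This commit swaps rust-analyzer's old `profile::span` guards for `tracing` spans throughout. A minimal, self-contained sketch of the RAII pattern being adopted, assuming the `tracing` and `tracing-subscriber` crates (the subscriber setup below is illustrative only, not what rust-analyzer itself installs):

// `span!(...).entered()` returns a guard; the span is closed when `_p` drops.
fn parse_macro_expansion_demo() {
    let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion_demo").entered();
    // ... the work being measured happens while the guard is live ...
}

fn main() {
    // Install a simple formatting subscriber so spans are actually emitted.
    tracing_subscriber::fmt().with_max_level(tracing::Level::INFO).init();
    parse_macro_expansion_demo();
}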
@@ -412,7 +307,10 @@ fn parse_macro_expansion_error(
         .map(|it| it.0.errors().to_vec().into_boxed_slice())
 }
 
-fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
+pub(crate) fn parse_with_map(
+    db: &dyn ExpandDatabase,
+    file_id: HirFileId,
+) -> (Parse<SyntaxNode>, SpanMap) {
     match file_id.repr() {
         HirFileIdRepr::FileId(file_id) => {
             (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
@@ -581,100 +479,18 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
         .unwrap_or_default()
 }
 
-fn decl_macro_expander(
-    db: &dyn ExpandDatabase,
-    def_crate: CrateId,
-    id: AstId<ast::Macro>,
-) -> Arc<DeclarativeMacroExpander> {
-    let crate_data = &db.crate_graph()[def_crate];
-    let is_2021 = crate_data.edition >= Edition::Edition2021;
-    let (root, map) = parse_with_map(db, id.file_id);
-    let root = root.syntax_node();
-
-    let transparency = |node| {
-        // ... would be nice to have the item tree here
-        let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
-        match &*attrs
-            .iter()
-            .find(|it| {
-                it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
-            })?
-            .token_tree_value()?
-            .token_trees
-        {
-            [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
-                "transparent" => Some(Transparency::Transparent),
-                "semitransparent" => Some(Transparency::SemiTransparent),
-                "opaque" => Some(Transparency::Opaque),
-                _ => None,
-            },
-            _ => None,
-        }
-    };
-    let toolchain = crate_data.toolchain.as_ref();
-    let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-        REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-            &base_db::Version {
-                pre: base_db::Prerelease::EMPTY,
-                build: base_db::BuildMetadata::EMPTY,
-                major: version.major,
-                minor: version.minor,
-                patch: version.patch,
-            },
-        )
-    });
-
-    let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
-        ast::Macro::MacroRules(macro_rules) => (
-            match macro_rules.token_tree() {
-                Some(arg) => {
-                    let tt = mbe::syntax_node_to_token_tree(
-                        arg.syntax(),
-                        map.as_ref(),
-                        map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()),
-                    );
-
-                    mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars)
-                }
-                None => mbe::DeclarativeMacro::from_err(
-                    mbe::ParseError::Expected("expected a token tree".into()),
-                    is_2021,
-                ),
-            },
-            transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
-        ),
-        ast::Macro::MacroDef(macro_def) => (
-            match macro_def.body() {
-                Some(arg) => {
-                    let tt = mbe::syntax_node_to_token_tree(
-                        arg.syntax(),
-                        map.as_ref(),
-                        map.span_for_range(macro_def.macro_token().unwrap().text_range()),
-                    );
-
-                    mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars)
-                }
-                None => mbe::DeclarativeMacro::from_err(
-                    mbe::ParseError::Expected("expected a token tree".into()),
-                    is_2021,
-                ),
-            },
-            transparency(&macro_def).unwrap_or(Transparency::Opaque),
-        ),
-    };
-    Arc::new(DeclarativeMacroExpander { mac, transparency })
-}
-
-fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
-    match id.kind {
-        MacroDefKind::Declarative(ast_id) => {
-            TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
-        }
-        MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
-        MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
-        MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
-        MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
-        MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
-    }
-}
+impl TokenExpander {
+    fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
+        match id.kind {
+            MacroDefKind::Declarative(ast_id) => {
+                TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
+            }
+            MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
+            MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
+            MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
+            MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
+            MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
+        }
+    }
+}
@@ -688,7 +504,7 @@ fn macro_expand(
     macro_call_id: MacroCallId,
     loc: MacroCallLoc,
 ) -> ExpandResult<CowArc<tt::Subtree>> {
-    let _p = profile::span("macro_expand");
+    let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered();
 
     let ExpandResult { value: tt, mut err } = match loc.def.kind {
         MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
@@ -862,40 +678,3 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
 fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
     db.intern_syntax_context(SyntaxContextData::root());
 }
-
-fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
-    let mut s = String::from("Expansions:");
-    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
-    entries.sort_by_key(|e| e.key);
-    for e in entries {
-        let id = e.key;
-        let expn_data = e.value.as_ref().unwrap();
-        s.push_str(&format!(
-            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
-            id,
-            expn_data.kind.file_id(),
-            expn_data.call_site,
-            SyntaxContextId::ROOT, // FIXME expn_data.def_site,
-            expn_data.kind.descr(),
-        ));
-    }
-
-    s.push_str("\n\nSyntaxContexts:\n");
-    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
-    entries.sort_by_key(|e| e.key);
-    for e in entries {
-        struct SyntaxContextDebug<'a>(
-            &'a dyn ExpandDatabase,
-            SyntaxContextId,
-            &'a SyntaxContextData,
-        );
-
-        impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
-            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                self.2.fancy_debug(self.1, self.0, f)
-            }
-        }
-        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
-    }
-    s
-}
177
crates/hir-expand/src/declarative.rs
Normal file

@@ -0,0 +1,177 @@
+//! Compiled declarative macro expanders (`macro_rules!`` and `macro`)
+use std::sync::OnceLock;
+
+use base_db::{CrateId, Edition, VersionReq};
+use span::{MacroCallId, Span};
+use syntax::{ast, AstNode};
+use triomphe::Arc;
+
+use crate::{
+    attrs::RawAttrs,
+    db::ExpandDatabase,
+    hygiene::{apply_mark, Transparency},
+    tt, AstId, ExpandError, ExpandResult,
+};
+
+/// Old-style `macro_rules` or the new macros 2.0
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct DeclarativeMacroExpander {
+    pub mac: mbe::DeclarativeMacro<span::Span>,
+    pub transparency: Transparency,
+}
+
+// FIXME: Remove this once we drop support for 1.76
+static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
+
+impl DeclarativeMacroExpander {
+    pub fn expand(
+        &self,
+        db: &dyn ExpandDatabase,
+        tt: tt::Subtree,
+        call_id: MacroCallId,
+    ) -> ExpandResult<tt::Subtree> {
+        let loc = db.lookup_intern_macro_call(call_id);
+        let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
+        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
+            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
+                &base_db::Version {
+                    pre: base_db::Prerelease::EMPTY,
+                    build: base_db::BuildMetadata::EMPTY,
+                    major: version.major,
+                    minor: version.minor,
+                    patch: version.patch,
+                },
+            )
+        });
+        match self.mac.err() {
+            Some(e) => ExpandResult::new(
+                tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
+                ExpandError::other(format!("invalid macro definition: {e}")),
+            ),
+            None => self
+                .mac
+                .expand(
+                    &tt,
+                    |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
+                    new_meta_vars,
+                    loc.call_site,
+                )
+                .map_err(Into::into),
+        }
+    }
+
+    pub fn expand_unhygienic(
+        &self,
+        db: &dyn ExpandDatabase,
+        tt: tt::Subtree,
+        krate: CrateId,
+        call_site: Span,
+    ) -> ExpandResult<tt::Subtree> {
+        let toolchain = &db.crate_graph()[krate].toolchain;
+        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
+            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
+                &base_db::Version {
+                    pre: base_db::Prerelease::EMPTY,
+                    build: base_db::BuildMetadata::EMPTY,
+                    major: version.major,
+                    minor: version.minor,
+                    patch: version.patch,
+                },
+            )
+        });
+        match self.mac.err() {
+            Some(e) => ExpandResult::new(
+                tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+                ExpandError::other(format!("invalid macro definition: {e}")),
+            ),
+            None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
+        }
+    }
+
+    pub(crate) fn expander(
+        db: &dyn ExpandDatabase,
+        def_crate: CrateId,
+        id: AstId<ast::Macro>,
+    ) -> Arc<DeclarativeMacroExpander> {
+        let crate_data = &db.crate_graph()[def_crate];
+        let is_2021 = crate_data.edition >= Edition::Edition2021;
+        let (root, map) = crate::db::parse_with_map(db, id.file_id);
+        let root = root.syntax_node();
+
+        let transparency = |node| {
+            // ... would be nice to have the item tree here
+            let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
+            match &*attrs
+                .iter()
+                .find(|it| {
+                    it.path.as_ident().and_then(|it| it.as_str())
+                        == Some("rustc_macro_transparency")
+                })?
+                .token_tree_value()?
+                .token_trees
+            {
+                [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
+                    "transparent" => Some(Transparency::Transparent),
+                    "semitransparent" => Some(Transparency::SemiTransparent),
+                    "opaque" => Some(Transparency::Opaque),
+                    _ => None,
+                },
+                _ => None,
+            }
+        };
+        let toolchain = crate_data.toolchain.as_ref();
+        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
+            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
+                &base_db::Version {
+                    pre: base_db::Prerelease::EMPTY,
+                    build: base_db::BuildMetadata::EMPTY,
+                    major: version.major,
+                    minor: version.minor,
+                    patch: version.patch,
+                },
+            )
+        });
+
+        let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
+            ast::Macro::MacroRules(macro_rules) => (
+                match macro_rules.token_tree() {
+                    Some(arg) => {
+                        let tt = mbe::syntax_node_to_token_tree(
+                            arg.syntax(),
+                            map.as_ref(),
+                            map.span_for_range(
+                                macro_rules.macro_rules_token().unwrap().text_range(),
+                            ),
+                        );
+
+                        mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars)
+                    }
+                    None => mbe::DeclarativeMacro::from_err(
+                        mbe::ParseError::Expected("expected a token tree".into()),
+                        is_2021,
+                    ),
+                },
+                transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
+            ),
+            ast::Macro::MacroDef(macro_def) => (
+                match macro_def.body() {
+                    Some(arg) => {
+                        let tt = mbe::syntax_node_to_token_tree(
+                            arg.syntax(),
+                            map.as_ref(),
+                            map.span_for_range(macro_def.macro_token().unwrap().text_range()),
+                        );
+
+                        mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars)
+                    }
+                    None => mbe::DeclarativeMacro::from_err(
+                        mbe::ParseError::Expected("expected a token tree".into()),
+                        is_2021,
+                    ),
+                },
+                transparency(&macro_def).unwrap_or(Transparency::Opaque),
+            ),
+        };
+        Arc::new(DeclarativeMacroExpander { mac, transparency })
+    }
+}
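The `REQUIREMENT` static in the new file gates new-metavariable behaviour on the toolchain being 1.76 or newer, parsing the requirement once and caching it. A standalone sketch of the same idea using the `semver` crate (whose `Version`/`VersionReq` appear to be what `base_db` re-exports; the function name here is hypothetical):

use semver::{Version, VersionReq};
use std::sync::OnceLock;

// Parse the requirement once, on first use, like the REQUIREMENT static above.
static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();

// The original rebuilds the version with empty pre-release/build metadata
// before matching, so e.g. a "1.76.0-nightly" toolchain still satisfies ">=1.76".
fn new_meta_vars(toolchain: Option<&Version>) -> bool {
    toolchain.map_or(false, |v| {
        let stripped = Version::new(v.major, v.minor, v.patch);
        REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(&stripped)
    })
}

fn main() {
    assert!(new_meta_vars(Some(&Version::new(1, 76, 0))));
    assert!(!new_meta_vars(Some(&Version::new(1, 75, 2))));
    assert!(!new_meta_vars(None));
}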
@@ -5,7 +5,7 @@ use either::Either;
 use span::{FileId, FileRange, HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId};
 use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};
 
-use crate::{db, ExpansionInfo, MacroFileIdExt};
+use crate::{db, map_node_range_up, span_for_offset, MacroFileIdExt};
 
 /// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
 ///
@@ -147,7 +147,7 @@ impl InFile<&SyntaxNode> {
             HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
             HirFileIdRepr::MacroFile(mac_file) => {
                 if let Some((res, ctxt)) =
-                    ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+                    map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
                 {
                     // FIXME: Figure out an API that makes proper use of ctx, this only exists to
                     // keep pre-token map rewrite behaviour.
@@ -163,12 +163,15 @@ impl InFile<&SyntaxNode> {
     }
 
     /// Falls back to the macro call range if the node cannot be mapped up fully.
-    pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
+    pub fn original_file_range_with_macro_call_body(
+        self,
+        db: &dyn db::ExpandDatabase,
+    ) -> FileRange {
         match self.file_id.repr() {
             HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
             HirFileIdRepr::MacroFile(mac_file) => {
                 if let Some((res, ctxt)) =
-                    ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+                    map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
                 {
                     // FIXME: Figure out an API that makes proper use of ctx, this only exists to
                     // keep pre-token map rewrite behaviour.
@@ -193,7 +196,7 @@ impl InFile<&SyntaxNode> {
                 Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
             }
             HirFileIdRepr::MacroFile(mac_file) => {
-                ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+                map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
             }
         }
     }
@@ -215,7 +218,7 @@ impl InFile<&SyntaxNode> {
         }
 
         let (FileRange { file_id, range }, ctx) =
-            ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;
+            map_node_range_up(db, &db.expansion_span_map(file_id), self.value.text_range())?;
 
         // FIXME: Figure out an API that makes proper use of ctx, this only exists to
         // keep pre-token map rewrite behaviour.
@@ -246,8 +249,11 @@ impl InFile<SyntaxToken> {
         match self.file_id.repr() {
             HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
             HirFileIdRepr::MacroFile(mac_file) => {
-                let (range, ctxt) = ExpansionInfo::new(db, mac_file)
-                    .span_for_offset(db, self.value.text_range().start());
+                let (range, ctxt) = span_for_offset(
+                    db,
+                    &db.expansion_span_map(mac_file),
+                    self.value.text_range().start(),
+                );
 
                 // FIXME: Figure out an API that makes proper use of ctx, this only exists to
                 // keep pre-token map rewrite behaviour.
@@ -269,8 +275,11 @@ impl InFile<SyntaxToken> {
                 Some(FileRange { file_id, range: self.value.text_range() })
             }
             HirFileIdRepr::MacroFile(mac_file) => {
-                let (range, ctxt) = ExpansionInfo::new(db, mac_file)
-                    .span_for_offset(db, self.value.text_range().start());
+                let (range, ctxt) = span_for_offset(
+                    db,
+                    &db.expansion_span_map(mac_file),
+                    self.value.text_range().start(),
+                );
 
                 // FIXME: Figure out an API that makes proper use of ctx, this only exists to
                 // keep pre-token map rewrite behaviour.
@@ -286,7 +295,7 @@ impl InFile<SyntaxToken> {
 
 impl InMacroFile<TextSize> {
     pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
-        ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value)
+        span_for_offset(db, &db.expansion_span_map(self.file_id), self.value)
     }
 }
 
@@ -300,7 +309,7 @@ impl InFile<TextRange> {
                 (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
             }
             HirFileIdRepr::MacroFile(mac_file) => {
-                match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+                match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
                    Some(it) => it,
                    None => {
                        let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
@@ -315,7 +324,7 @@ impl InFile<TextRange> {
         match self.file_id.repr() {
             HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
             HirFileIdRepr::MacroFile(mac_file) => {
-                match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+                match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
                    Some((it, SyntaxContextId::ROOT)) => it,
                    _ => {
                        let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
@@ -335,7 +344,7 @@ impl InFile<TextRange> {
                 Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
             }
             HirFileIdRepr::MacroFile(mac_file) => {
-                ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value)
+                map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
             }
         }
     }
@@ -355,8 +364,11 @@ impl<N: AstNode> InFile<N> {
             return None;
         }
 
-        let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
-            .map_node_range_up(db, self.value.syntax().text_range())?;
+        let (FileRange { file_id, range }, ctx) = map_node_range_up(
+            db,
+            &db.expansion_span_map(file_id),
+            self.value.syntax().text_range(),
+        )?;
 
         // FIXME: Figure out an API that makes proper use of ctx, this only exists to
         // keep pre-token map rewrite behaviour.
@@ -245,3 +245,43 @@ pub fn marks_rev(
     })
     .map(|ctx| ctx.outer_mark(db))
 }
+
+pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
+    use crate::db::{InternMacroCallLookupQuery, InternSyntaxContextLookupQuery};
+    use base_db::salsa::debug::DebugQueryTable;
+
+    let mut s = String::from("Expansions:");
+    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
+    entries.sort_by_key(|e| e.key);
+    for e in entries {
+        let id = e.key;
+        let expn_data = e.value.as_ref().unwrap();
+        s.push_str(&format!(
+            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
+            id,
+            expn_data.kind.file_id(),
+            expn_data.call_site,
+            SyntaxContextId::ROOT, // FIXME expn_data.def_site,
+            expn_data.kind.descr(),
+        ));
+    }
+
+    s.push_str("\n\nSyntaxContexts:\n");
+    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
+    entries.sort_by_key(|e| e.key);
+    for e in entries {
+        struct SyntaxContextDebug<'a>(
+            &'a dyn ExpandDatabase,
+            SyntaxContextId,
+            &'a SyntaxContextData,
+        );
+
+        impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
+            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+                self.2.fancy_debug(self.1, self.0, f)
+            }
+        }
+        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
+    }
+    s
+}
@@ -11,16 +11,18 @@ pub mod attrs;
 pub mod builtin_attr_macro;
 pub mod builtin_derive_macro;
 pub mod builtin_fn_macro;
+pub mod change;
 pub mod db;
+pub mod declarative;
 pub mod eager;
 pub mod files;
-pub mod change;
 pub mod hygiene;
 pub mod mod_path;
 pub mod name;
 pub mod proc_macro;
 pub mod quote;
 pub mod span_map;
 
 mod fixup;
 
 use attrs::collect_attrs;
@@ -167,7 +169,8 @@ pub struct MacroCallLoc {
     pub krate: CrateId,
     /// Some if this is a macro call for an eager macro. Note that this is `None`
     /// for the eager input macro file.
-    // FIXME: This seems bad to save in an interned structure
+    // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing
+    // leakage problems here
     eager: Option<Arc<EagerCallInfo>>,
     pub kind: MacroCallKind,
     pub call_site: Span,
@@ -220,7 +223,7 @@ pub enum MacroCallKind {
     },
     Attr {
         ast_id: AstId<ast::Item>,
-        // FIXME: This is being interned, subtrees can very quickly differ just slightly causing
+        // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing
         // leakage problems here
         attr_args: Option<Arc<tt::Subtree>>,
         /// Syntactical index of the invoking `#[attribute]`.
@@ -520,6 +523,24 @@ impl MacroCallLoc {
             }
         }
     }
+
+    pub fn include_file_id(
+        &self,
+        db: &dyn ExpandDatabase,
+        macro_call_id: MacroCallId,
+    ) -> Option<FileId> {
+        if self.def.is_include() {
+            if let Some(eager) = &self.eager {
+                if let Ok(it) =
+                    builtin_fn_macro::include_input_to_file_id(db, macro_call_id, &eager.arg)
+                {
+                    return Some(it);
+                }
+            }
+        }
+
+        None
+    }
 }
 
 impl MacroCallKind {
@@ -656,6 +677,10 @@ impl ExpansionInfo {
         Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?))
     }
 
+    pub fn call_file(&self) -> HirFileId {
+        self.arg.file_id
+    }
+
     /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
     pub fn map_range_down(
         &self,
@@ -676,13 +701,7 @@ impl ExpansionInfo {
         offset: TextSize,
     ) -> (FileRange, SyntaxContextId) {
         debug_assert!(self.expanded.value.text_range().contains(offset));
-        let span = self.exp_map.span_at(offset);
-        let anchor_offset = db
-            .ast_id_map(span.anchor.file_id.into())
-            .get_erased(span.anchor.ast_id)
-            .text_range()
-            .start();
-        (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+        span_for_offset(db, &self.exp_map, offset)
     }
 
     /// Maps up the text range out of the expansion hierarchy back into the original file its from.
@@ -692,27 +711,7 @@ impl ExpansionInfo {
         range: TextRange,
     ) -> Option<(FileRange, SyntaxContextId)> {
         debug_assert!(self.expanded.value.text_range().contains_range(range));
-        let mut spans = self.exp_map.spans_for_range(range);
-        let Span { range, anchor, ctx } = spans.next()?;
-        let mut start = range.start();
-        let mut end = range.end();
-
-        for span in spans {
-            if span.anchor != anchor || span.ctx != ctx {
-                return None;
-            }
-            start = start.min(span.range.start());
-            end = end.max(span.range.end());
-        }
-        let anchor_offset =
-            db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
-        Some((
-            FileRange {
-                file_id: anchor.file_id,
-                range: TextRange::new(start, end) + anchor_offset,
-            },
-            ctx,
-        ))
+        map_node_range_up(db, &self.exp_map, range)
     }
 
     /// Maps up the text range out of the expansion into is macro call.
@@ -801,6 +800,47 @@ impl ExpansionInfo {
     }
 }
 
+/// Maps up the text range out of the expansion hierarchy back into the original file its from.
+pub fn map_node_range_up(
+    db: &dyn ExpandDatabase,
+    exp_map: &ExpansionSpanMap,
+    range: TextRange,
+) -> Option<(FileRange, SyntaxContextId)> {
+    let mut spans = exp_map.spans_for_range(range);
+    let Span { range, anchor, ctx } = spans.next()?;
+    let mut start = range.start();
+    let mut end = range.end();
+
+    for span in spans {
+        if span.anchor != anchor || span.ctx != ctx {
+            return None;
+        }
+        start = start.min(span.range.start());
+        end = end.max(span.range.end());
+    }
+    let anchor_offset =
+        db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
+    Some((
+        FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset },
+        ctx,
+    ))
+}
+
+/// Looks up the span at the given offset.
+pub fn span_for_offset(
+    db: &dyn ExpandDatabase,
+    exp_map: &ExpansionSpanMap,
+    offset: TextSize,
+) -> (FileRange, SyntaxContextId) {
+    let span = exp_map.span_at(offset);
+    let anchor_offset = db
+        .ast_id_map(span.anchor.file_id.into())
+        .get_erased(span.anchor.ast_id)
+        .text_range()
+        .start();
+    (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+}
+
 /// In Rust, macros expand token trees to token trees. When we want to turn a
 /// token tree into an AST node, we need to figure out what kind of AST node we
 /// want: something like `foo` can be a type, an expression, or a pattern.
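These two free functions are the heart of span mapping: expansion spans store ranges relative to an anchor AST item, so mapping back to the original file adds the anchor's absolute offset to the relative range, and all spans of a node must agree on anchor and syntax context. A toy model of that arithmetic, with hypothetical types standing in for the `span` crate's:

use std::ops::Range;

// Toy stand-ins for span::SpanAnchor and span::Span.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Anchor {
    file_id: u32,
    // Absolute offset of the anchor item within its file.
    abs_offset: u32,
}

#[derive(Clone, Copy)]
struct Span {
    anchor: Anchor,
    // Range relative to the anchor, as stored in the expansion span map.
    rel: (u32, u32),
}

// Mapping "up": absolute file range = anchor offset + relative range.
fn map_up(span: Span) -> (u32, Range<u32>) {
    let base = span.anchor.abs_offset;
    (span.anchor.file_id, base + span.rel.0..base + span.rel.1)
}

fn main() {
    let anchor = Anchor { file_id: 7, abs_offset: 100 };
    let span = Span { anchor, rel: (4, 9) };
    assert_eq!(map_up(span), (7, 104..109));
}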
@@ -10,6 +10,7 @@ use crate::{
     hygiene::{marks_rev, SyntaxContextExt, Transparency},
     name::{known, AsName, Name},
     span_map::SpanMapRef,
+    tt,
 };
 use base_db::CrateId;
 use smallvec::SmallVec;
@@ -39,7 +40,7 @@ pub enum PathKind {
     Crate,
     /// Absolute path (::foo)
     Abs,
-    // FIXME: Remove this
+    // FIXME: Can we remove this somehow?
     /// `$crate` from macro expansion
     DollarCrate(CrateId),
 }
@@ -50,11 +51,16 @@ impl ModPath {
         path: ast::Path,
         span_map: SpanMapRef<'_>,
     ) -> Option<ModPath> {
-        convert_path(db, None, path, span_map)
+        convert_path(db, path, span_map)
+    }
+
+    pub fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
+        convert_path_tt(db, tt)
     }
 
     pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
-        let segments = segments.into_iter().collect();
+        let mut segments: SmallVec<_> = segments.into_iter().collect();
+        segments.shrink_to_fit();
         ModPath { kind, segments }
     }
 
@@ -193,22 +199,15 @@ fn display_fmt_path(
 
 fn convert_path(
     db: &dyn ExpandDatabase,
-    prefix: Option<ModPath>,
     path: ast::Path,
     span_map: SpanMapRef<'_>,
 ) -> Option<ModPath> {
-    let prefix = match path.qualifier() {
-        Some(qual) => Some(convert_path(db, prefix, qual, span_map)?),
-        None => prefix,
-    };
-
-    let segment = path.segment()?;
+    let mut segments = path.segments();
+
+    let segment = &segments.next()?;
     let mut mod_path = match segment.kind()? {
         ast::PathSegmentKind::Name(name_ref) => {
             if name_ref.text() == "$crate" {
-                if prefix.is_some() {
-                    return None;
-                }
                 ModPath::from_kind(
                     resolve_crate_root(
                         db,
@@ -218,41 +217,36 @@ fn convert_path(
                     .unwrap_or(PathKind::Crate),
                 )
             } else {
-                let mut res = prefix.unwrap_or_else(|| {
-                    ModPath::from_kind(
-                        segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
-                    )
-                });
+                let mut res = ModPath::from_kind(
+                    segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
+                );
                 res.segments.push(name_ref.as_name());
                 res
             }
         }
         ast::PathSegmentKind::SelfTypeKw => {
-            if prefix.is_some() {
-                return None;
-            }
             ModPath::from_segments(PathKind::Plain, Some(known::SELF_TYPE))
         }
-        ast::PathSegmentKind::CrateKw => {
-            if prefix.is_some() {
-                return None;
-            }
-            ModPath::from_segments(PathKind::Crate, iter::empty())
-        }
-        ast::PathSegmentKind::SelfKw => {
-            if prefix.is_some() {
-                return None;
-            }
-            ModPath::from_segments(PathKind::Super(0), iter::empty())
-        }
+        ast::PathSegmentKind::CrateKw => ModPath::from_segments(PathKind::Crate, iter::empty()),
+        ast::PathSegmentKind::SelfKw => ModPath::from_segments(PathKind::Super(0), iter::empty()),
         ast::PathSegmentKind::SuperKw => {
-            let nested_super_count = match prefix.map(|p| p.kind) {
-                Some(PathKind::Super(n)) => n,
-                Some(_) => return None,
-                None => 0,
-            };
+            let mut deg = 1;
+            let mut next_segment = None;
+            for segment in segments.by_ref() {
+                match segment.kind()? {
+                    ast::PathSegmentKind::SuperKw => deg += 1,
+                    ast::PathSegmentKind::Name(name) => {
+                        next_segment = Some(name.as_name());
+                        break;
+                    }
+                    ast::PathSegmentKind::Type { .. }
+                    | ast::PathSegmentKind::SelfTypeKw
+                    | ast::PathSegmentKind::SelfKw
+                    | ast::PathSegmentKind::CrateKw => return None,
+                }
+            }
 
-            ModPath::from_segments(PathKind::Super(nested_super_count + 1), iter::empty())
+            ModPath::from_segments(PathKind::Super(deg), next_segment)
         }
         ast::PathSegmentKind::Type { .. } => {
             // not allowed in imports
@@ -260,6 +254,14 @@ fn convert_path(
         }
     };
 
+    for segment in segments {
+        let name = match segment.kind()? {
+            ast::PathSegmentKind::Name(name) => name.as_name(),
+            _ => return None,
+        };
+        mod_path.segments.push(name);
+    }
+
     // handle local_inner_macros :
     // Basically, even in rustc it is quite hacky:
     // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456
@@ -281,6 +283,46 @@ fn convert_path(
     Some(mod_path)
 }
 
+fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
+    let mut leaves = tt.iter().filter_map(|tt| match tt {
+        tt::TokenTree::Leaf(leaf) => Some(leaf),
+        tt::TokenTree::Subtree(_) => None,
+    });
+    let mut segments = smallvec::smallvec![];
+    let kind = match leaves.next()? {
+        tt::Leaf::Punct(tt::Punct { char: ':', .. }) => match leaves.next()? {
+            tt::Leaf::Punct(tt::Punct { char: ':', .. }) => PathKind::Abs,
+            _ => return None,
+        },
+        tt::Leaf::Ident(tt::Ident { text, span }) if text == "$crate" => {
+            resolve_crate_root(db, span.ctx).map(PathKind::DollarCrate).unwrap_or(PathKind::Crate)
+        }
+        tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::Super(0),
+        tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => {
+            let mut deg = 1;
+            while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leaves.next() {
+                if text != "super" {
+                    segments.push(Name::new_text_dont_use(text.clone()));
+                    break;
+                }
+                deg += 1;
+            }
+            PathKind::Super(deg)
+        }
+        tt::Leaf::Ident(tt::Ident { text, .. }) if text == "crate" => PathKind::Crate,
+        tt::Leaf::Ident(ident) => {
+            segments.push(Name::new_text_dont_use(ident.text.clone()));
+            PathKind::Plain
+        }
+        _ => return None,
+    };
+    segments.extend(leaves.filter_map(|leaf| match leaf {
+        ::tt::Leaf::Ident(ident) => Some(Name::new_text_dont_use(ident.text.clone())),
+        _ => None,
+    }));
+    Some(ModPath { kind, segments })
+}
+
 pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
     // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
     // we don't want to pretend that the `macro_rules!` definition is in the `macro`
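Both the rewritten `SuperKw` arm and `convert_path_tt` fold a run of leading `super` segments into a single `PathKind::Super(deg)`. A minimal sketch of that counting over plain strings (the real code walks `ast::PathSegment`s and token-tree leaves instead):

// Count leading `super` segments; the first non-`super` name begins the tail.
fn split_super(segments: &[&str]) -> (u8, Vec<String>) {
    let deg = segments.iter().take_while(|s| **s == "super").count();
    let tail = segments[deg..].iter().map(|s| s.to_string()).collect();
    (deg as u8, tail)
}

fn main() {
    assert_eq!(split_super(&["super", "super", "foo"]), (2, vec!["foo".to_string()]));
    assert_eq!(split_super(&["bar"]), (0, vec!["bar".to_string()]));
}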
@@ -1,4 +1,4 @@
-//! Proc Macro Expander stub
+//! Proc Macro Expander stuff
 
 use core::fmt;
 use std::{panic::RefUnwindSafe, sync};
@@ -1,10 +1,12 @@
 //! Span maps for real files and macro expansions.
-use span::Span;
-use syntax::TextRange;
+use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span};
+use syntax::{AstNode, TextRange};
 use triomphe::Arc;
 
 pub use span::RealSpanMap;
 
+use crate::db::ExpandDatabase;
+
 pub type ExpansionSpanMap = span::SpanMap<Span>;
 
 /// Spanmap for a macro file or a real file
@@ -34,7 +36,6 @@ impl mbe::SpanMapper<Span> for SpanMapRef<'_> {
         self.span_for_range(range)
     }
 }
-
 impl SpanMap {
     pub fn span_for_range(&self, range: TextRange) -> Span {
         match self {
@@ -53,6 +54,16 @@ impl SpanMap {
             Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
         }
     }
+
+    #[inline]
+    pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
+        match file_id.repr() {
+            HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
+            HirFileIdRepr::MacroFile(m) => {
+                SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
+            }
+        }
+    }
 }
 
 impl SpanMapRef<'_> {
@@ -63,3 +74,30 @@ impl SpanMapRef<'_> {
         }
     }
 }
+
+pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
+    use syntax::ast::HasModuleItem;
+    let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
+    let ast_id_map = db.ast_id_map(file_id.into());
+    let tree = db.parse(file_id).tree();
+    // FIXME: Descend into modules and other item containing items that are not annotated with attributes
+    // and allocate pairs for those as well. This gives us finer grained span anchors resulting in
+    // better incrementality
+    pairs.extend(
+        tree.items()
+            .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
+    );
+
+    Arc::new(RealSpanMap::from_file(
+        file_id,
+        pairs.into_boxed_slice(),
+        tree.syntax().text_range().end(),
+    ))
+}
+
+pub(crate) fn expansion_span_map(
+    db: &dyn ExpandDatabase,
+    file_id: MacroFileId,
+) -> Arc<ExpansionSpanMap> {
+    db.parse_macro_expansion(file_id).value.1
+}
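`real_span_map` records one anchor pair per top-level item, sorted by offset, which lets span lookups find the closest preceding anchor cheaply. A toy version of a plausible lookup strategy (the real `span::RealSpanMap` API may differ; this only illustrates why the pairs are kept sorted):

// Sorted (start_offset, anchor_id) pairs, as built by `real_span_map` above.
struct ToySpanMap {
    pairs: Vec<(u32, u32)>,
}

impl ToySpanMap {
    // Find the last anchor starting at or before `offset` by binary search.
    fn anchor_for(&self, offset: u32) -> u32 {
        let idx = self.pairs.partition_point(|&(start, _)| start <= offset);
        self.pairs[idx - 1].1
    }
}

fn main() {
    // Pair (0, root) is always present, so `idx - 1` cannot underflow.
    let map = ToySpanMap { pairs: vec![(0, 0), (120, 1), (480, 2)] };
    assert_eq!(map.anchor_for(50), 0);
    assert_eq!(map.anchor_for(120), 1);
    assert_eq!(map.anchor_for(999), 2);
}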
@@ -36,6 +36,7 @@ indexmap.workspace = true
 
 ra-ap-rustc_abi.workspace = true
 ra-ap-rustc_index.workspace = true
+ra-ap-rustc_pattern_analysis.workspace = true
 
 
 # local deps
@@ -142,7 +142,7 @@ pub(crate) fn deref_by_trait(
     table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>,
     ty: Ty,
 ) -> Option<Ty> {
-    let _p = profile::span("deref_by_trait");
+    let _p = tracing::span!(tracing::Level::INFO, "deref_by_trait").entered();
     if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() {
         // don't try to deref unknown variables
         return None;
@@ -689,7 +689,7 @@ pub(crate) fn impl_datum_query(
     krate: CrateId,
     impl_id: ImplId,
 ) -> Arc<ImplDatum> {
-    let _p = profile::span("impl_datum");
+    let _p = tracing::span!(tracing::Level::INFO, "impl_datum").entered();
     debug!("impl_datum {:?}", impl_id);
     let impl_: hir_def::ImplId = from_chalk(db, impl_id);
     impl_def_datum(db, krate, impl_id, impl_)
@@ -118,7 +118,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
     fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
 
     #[salsa::invoke(crate::layout::target_data_layout_query)]
-    fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
+    fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
 
     #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
     fn lookup_impl_method(
@@ -281,7 +281,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 }
 
 fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
-    let _p = profile::span("infer:wait").detail(|| match def {
+    let detail = match def {
         DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(),
         DefWithBodyId::StaticId(it) => {
             db.static_data(it).name.clone().display(db.upcast()).to_string()
@@ -297,7 +297,8 @@ fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult>
             db.enum_variant_data(it).name.display(db.upcast()).to_string()
         }
         DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
-    });
+    };
+    let _p = tracing::span!(tracing::Level::INFO, "infer:wait", ?detail).entered();
     db.infer_query(def)
 }
 
@@ -307,7 +308,7 @@ fn trait_solve_wait(
     block: Option<BlockId>,
     goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
 ) -> Option<crate::Solution> {
-    let _p = profile::span("trait_solve::wait");
+    let _p = tracing::span!(tracing::Level::INFO, "trait_solve::wait").entered();
     db.trait_solve_query(krate, block, goal)
 }
 
@@ -1,8 +1,8 @@
 //! Type inference-based diagnostics.
+mod decl_check;
 mod expr;
 mod match_check;
 mod unsafe_check;
-mod decl_check;
 
 pub use crate::diagnostics::{
     decl_check::{incorrect_case, CaseType, IncorrectCase},
@@ -45,7 +45,7 @@ mod allow {
 }
 
 pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
-    let _p = profile::span("validate_module_item");
+    let _p = tracing::span!(tracing::Level::INFO, "validate_module_item").entered();
     let mut validator = DeclValidator::new(db);
     validator.validate_item(owner);
     validator.sink
@@ -11,6 +11,7 @@ use hir_def::{ItemContainerId, Lookup};
 use hir_expand::name;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
+use rustc_pattern_analysis::usefulness::{compute_match_usefulness, ValidityConstraint};
 use triomphe::Arc;
 use typed_arena::Arena;
 
@@ -18,8 +19,7 @@ use crate::{
     db::HirDatabase,
     diagnostics::match_check::{
         self,
-        deconstruct_pat::DeconstructedPat,
-        usefulness::{compute_match_usefulness, MatchCheckCtx},
+        pat_analysis::{self, DeconstructedPat, MatchCheckCtx, WitnessPat},
     },
     display::HirDisplay,
     InferenceResult, Ty, TyExt,
@@ -48,7 +48,8 @@ pub enum BodyValidationDiagnostic {
 
 impl BodyValidationDiagnostic {
     pub fn collect(db: &dyn HirDatabase, owner: DefWithBodyId) -> Vec<BodyValidationDiagnostic> {
-        let _p = profile::span("BodyValidationDiagnostic::collect");
+        let _p =
+            tracing::span!(tracing::Level::INFO, "BodyValidationDiagnostic::collect").entered();
         let infer = db.infer(owner);
         let mut validator = ExprValidator::new(owner, infer);
         validator.validate_body(db);
@@ -152,7 +153,14 @@ impl ExprValidator {
         }
 
         let pattern_arena = Arena::new();
-        let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db, &pattern_arena);
+        let ty_arena = Arena::new();
+        let cx = MatchCheckCtx::new(
+            self.owner.module(db.upcast()),
+            self.owner,
+            db,
+            &pattern_arena,
+            &ty_arena,
+        );
 
         let mut m_arms = Vec::with_capacity(arms.len());
         let mut has_lowering_errors = false;
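The second arena exists because the new pattern-analysis API borrows both deconstructed patterns and interned types for the same lifetime, so both must live in caller-owned storage that outlives the check. A tiny illustration of the `typed_arena` borrowing model this relies on:

use typed_arena::Arena;

fn main() {
    // `alloc` takes `&self` and hands out references that live as long as the
    // arena itself, so many borrows can coexist across the whole analysis.
    let arena: Arena<String> = Arena::new();
    let a: &String = arena.alloc(String::from("pattern"));
    let b: &String = arena.alloc(String::from("type"));
    println!("{a} {b}");
}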
@@ -178,9 +186,10 @@ impl ExprValidator {
             // If we had a NotUsefulMatchArm diagnostic, we could
             // check the usefulness of each pattern as we added it
             // to the matrix here.
-            let m_arm = match_check::MatchArm {
+            let m_arm = pat_analysis::MatchArm {
                 pat: self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors),
                 has_guard: arm.guard.is_some(),
+                arm_data: (),
             };
             m_arms.push(m_arm);
             if !has_lowering_errors {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
let report = compute_match_usefulness(&cx, &m_arms, scrut_ty);
|
let report = match compute_match_usefulness(
|
||||||
|
rustc_pattern_analysis::MatchCtxt { tycx: &cx },
|
||||||
|
m_arms.as_slice(),
|
||||||
|
scrut_ty.clone(),
|
||||||
|
ValidityConstraint::ValidOnly,
|
||||||
|
) {
|
||||||
|
Ok(report) => report,
|
||||||
|
Err(void) => match void {},
|
||||||
|
};
|
||||||
|
|
||||||
// FIXME Report unreachable arms
|
// FIXME Report unreachable arms
|
||||||
// https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
|
// https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
|
||||||
|
@ -213,7 +230,7 @@ impl ExprValidator {
|
||||||
|
|
||||||
fn lower_pattern<'p>(
|
fn lower_pattern<'p>(
|
||||||
&self,
|
&self,
|
||||||
cx: &MatchCheckCtx<'_, 'p>,
|
cx: &MatchCheckCtx<'p>,
|
||||||
pat: PatId,
|
pat: PatId,
|
||||||
db: &dyn HirDatabase,
|
db: &dyn HirDatabase,
|
||||||
body: &Body,
|
body: &Body,
|
||||||
|
@ -221,7 +238,7 @@ impl ExprValidator {
|
||||||
) -> &'p DeconstructedPat<'p> {
|
) -> &'p DeconstructedPat<'p> {
|
||||||
let mut patcx = match_check::PatCtxt::new(db, &self.infer, body);
|
let mut patcx = match_check::PatCtxt::new(db, &self.infer, body);
|
||||||
let pattern = patcx.lower_pattern(pat);
|
let pattern = patcx.lower_pattern(pat);
|
||||||
let pattern = cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, &pattern));
|
let pattern = cx.pattern_arena.alloc(cx.lower_pat(&pattern));
|
||||||
if !patcx.errors.is_empty() {
|
if !patcx.errors.is_empty() {
|
||||||
*have_errors = true;
|
*have_errors = true;
|
||||||
}
|
}
|
||||||
|
@ -364,16 +381,16 @@ fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResul
|
||||||
}
|
}
|
||||||
|
|
||||||
fn missing_match_arms<'p>(
|
fn missing_match_arms<'p>(
|
||||||
cx: &MatchCheckCtx<'_, 'p>,
|
cx: &MatchCheckCtx<'p>,
|
||||||
scrut_ty: &Ty,
|
scrut_ty: &Ty,
|
||||||
witnesses: Vec<DeconstructedPat<'p>>,
|
witnesses: Vec<WitnessPat<'p>>,
|
||||||
arms: &[MatchArm],
|
arms: &[MatchArm],
|
||||||
) -> String {
|
) -> String {
|
||||||
struct DisplayWitness<'a, 'p>(&'a DeconstructedPat<'p>, &'a MatchCheckCtx<'a, 'p>);
|
struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>);
|
||||||
impl fmt::Display for DisplayWitness<'_, '_> {
|
impl fmt::Display for DisplayWitness<'_, '_> {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
let DisplayWitness(witness, cx) = *self;
|
let DisplayWitness(witness, cx) = *self;
|
||||||
let pat = witness.to_pat(cx);
|
let pat = cx.hoist_witness_pat(witness);
|
||||||
write!(f, "{}", pat.display(cx.db))
|
write!(f, "{}", pat.display(cx.db))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -7,8 +7,7 @@
 
 mod pat_util;
 
-pub(crate) mod deconstruct_pat;
-pub(crate) mod usefulness;
+pub(crate) mod pat_analysis;
 
 use chalk_ir::Mutability;
 use hir_def::{
@@ -27,8 +26,6 @@ use crate::{
 
 use self::pat_util::EnumerateAndAdjustIterator;
 
-pub(crate) use self::usefulness::MatchArm;
-
 #[derive(Clone, Debug)]
 pub(crate) enum PatternError {
     Unimplemented,
@@ -413,98 +410,3 @@
         (self.0)(f)
     }
 }
-
-pub(crate) trait PatternFoldable: Sized {
-    fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        self.super_fold_with(folder)
-    }
-
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self;
-}
-
-pub(crate) trait PatternFolder: Sized {
-    fn fold_pattern(&mut self, pattern: &Pat) -> Pat {
-        pattern.super_fold_with(self)
-    }
-
-    fn fold_pattern_kind(&mut self, kind: &PatKind) -> PatKind {
-        kind.super_fold_with(self)
-    }
-}
-
-impl<T: PatternFoldable> PatternFoldable for Box<T> {
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        let content: T = (**self).fold_with(folder);
-        Box::new(content)
-    }
-}
-
-impl<T: PatternFoldable> PatternFoldable for Vec<T> {
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        self.iter().map(|t| t.fold_with(folder)).collect()
-    }
-}
-
-impl<T: PatternFoldable> PatternFoldable for Option<T> {
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        self.as_ref().map(|t| t.fold_with(folder))
-    }
-}
-
-macro_rules! clone_impls {
-    ($($ty:ty),+) => {
-        $(
-            impl PatternFoldable for $ty {
-                fn super_fold_with<F: PatternFolder>(&self, _: &mut F) -> Self {
-                    Clone::clone(self)
-                }
-            }
-        )+
-    }
-}
-
-clone_impls! { LocalFieldId, Ty, Substitution, EnumVariantId }
-
-impl PatternFoldable for FieldPat {
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        FieldPat { field: self.field.fold_with(folder), pattern: self.pattern.fold_with(folder) }
-    }
-}
-
-impl PatternFoldable for Pat {
-    fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        folder.fold_pattern(self)
-    }
-
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        Pat { ty: self.ty.fold_with(folder), kind: self.kind.fold_with(folder) }
-    }
-}
-
-impl PatternFoldable for PatKind {
-    fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        folder.fold_pattern_kind(self)
-    }
-
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        match self {
-            PatKind::Wild => PatKind::Wild,
-            PatKind::Binding { name, subpattern } => {
-                PatKind::Binding { name: name.clone(), subpattern: subpattern.fold_with(folder) }
-            }
-            PatKind::Variant { substs, enum_variant, subpatterns } => PatKind::Variant {
-                substs: substs.fold_with(folder),
-                enum_variant: enum_variant.fold_with(folder),
-                subpatterns: subpatterns.fold_with(folder),
-            },
-            PatKind::Leaf { subpatterns } => {
-                PatKind::Leaf { subpatterns: subpatterns.fold_with(folder) }
-            }
-            PatKind::Deref { subpattern } => {
-                PatKind::Deref { subpattern: subpattern.fold_with(folder) }
-            }
-            &PatKind::LiteralBool { value } => PatKind::LiteralBool { value },
-            PatKind::Or { pats } => PatKind::Or { pats: pats.fold_with(folder) },
-        }
-    }
-}
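For orientation (a sketch, not part of the diff): the removed `PatternFoldable`/`PatternFolder` pair is the classic fold-and-rebuild visitor, where `fold_*` is the override point and `super_fold_*` performs the structural recursion. A cut-down illustration of the same shape on a toy expression type:

    // Minimal sketch of the fold pattern the deleted traits implemented.
    #[derive(Clone, Debug)]
    enum Expr {
        Num(i64),
        Neg(Box<Expr>),
        Add(Box<Expr>, Box<Expr>),
    }

    trait Folder: Sized {
        // Override point; defaults to plain structural recursion.
        fn fold_expr(&mut self, e: &Expr) -> Expr {
            super_fold_expr(self, e)
        }
    }

    // The `super_fold_with` analogue: rebuild the node, folding children.
    fn super_fold_expr<F: Folder>(f: &mut F, e: &Expr) -> Expr {
        match e {
            Expr::Num(n) => Expr::Num(*n),
            Expr::Neg(x) => Expr::Neg(Box::new(f.fold_expr(x))),
            Expr::Add(a, b) => Expr::Add(Box::new(f.fold_expr(a)), Box::new(f.fold_expr(b))),
        }
    }

    // A folder that cancels double negation: Neg(Neg(x)) => x.
    struct SimplifyNeg;

    impl Folder for SimplifyNeg {
        fn fold_expr(&mut self, e: &Expr) -> Expr {
            if let Expr::Neg(inner) = e {
                if let Expr::Neg(x) = &**inner {
                    return self.fold_expr(x);
                }
            }
            super_fold_expr(self, e)
        }
    }

    fn main() {
        let e = Expr::Neg(Box::new(Expr::Neg(Box::new(Expr::Num(7)))));
        println!("{:?}", SimplifyNeg.fold_expr(&e)); // prints: Num(7)
    }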
File diff suppressed because it is too large
476 crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs Normal file
@@ -0,0 +1,476 @@
+//! Interface with `rustc_pattern_analysis`.
+
+use std::fmt;
+
+use hir_def::{DefWithBodyId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
+use rustc_hash::FxHashMap;
+use rustc_pattern_analysis::{
+    constructor::{Constructor, ConstructorSet, VariantVisibility},
+    index::IdxContainer,
+    Captures, TypeCx,
+};
+use smallvec::SmallVec;
+use stdx::never;
+use typed_arena::Arena;
+
+use crate::{
+    db::HirDatabase,
+    infer::normalize,
+    inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from},
+    AdtId, Interner, Scalar, Ty, TyExt, TyKind,
+};
+
+use super::{is_box, FieldPat, Pat, PatKind};
+
+use Constructor::*;
+
+// Re-export r-a-specific versions of all these types.
+pub(crate) type DeconstructedPat<'p> =
+    rustc_pattern_analysis::pat::DeconstructedPat<'p, MatchCheckCtx<'p>>;
+pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>;
+pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat<MatchCheckCtx<'p>>;
+
+/// [Constructor] uses this in unimplemented variants.
+/// It allows porting match expressions from upstream algorithm without losing semantics.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(crate) enum Void {}
+
+#[derive(Clone)]
+pub(crate) struct MatchCheckCtx<'p> {
+    module: ModuleId,
+    body: DefWithBodyId,
+    pub(crate) db: &'p dyn HirDatabase,
+    pub(crate) pattern_arena: &'p Arena<DeconstructedPat<'p>>,
+    ty_arena: &'p Arena<Ty>,
+    exhaustive_patterns: bool,
+}
+
+impl<'p> MatchCheckCtx<'p> {
+    pub(crate) fn new(
+        module: ModuleId,
+        body: DefWithBodyId,
+        db: &'p dyn HirDatabase,
+        pattern_arena: &'p Arena<DeconstructedPat<'p>>,
+        ty_arena: &'p Arena<Ty>,
+    ) -> Self {
+        let def_map = db.crate_def_map(module.krate());
+        let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns");
+        Self { module, body, db, pattern_arena, exhaustive_patterns, ty_arena }
+    }
+
+    fn is_uninhabited(&self, ty: &Ty) -> bool {
+        is_ty_uninhabited_from(ty, self.module, self.db)
+    }
+
+    /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
+    fn is_foreign_non_exhaustive_enum(&self, ty: &Ty) -> bool {
+        match ty.as_adt() {
+            Some((adt @ hir_def::AdtId::EnumId(_), _)) => {
+                let has_non_exhaustive_attr =
+                    self.db.attrs(adt.into()).by_key("non_exhaustive").exists();
+                let is_local = adt.module(self.db.upcast()).krate() == self.module.krate();
+                has_non_exhaustive_attr && !is_local
+            }
+            _ => false,
+        }
+    }
+
+    fn variant_id_for_adt(&self, ctor: &Constructor<Self>, adt: hir_def::AdtId) -> VariantId {
+        match ctor {
+            &Variant(id) => id.into(),
+            Struct | UnionField => {
+                assert!(!matches!(adt, hir_def::AdtId::EnumId(_)));
+                match adt {
+                    hir_def::AdtId::EnumId(_) => unreachable!(),
+                    hir_def::AdtId::StructId(id) => id.into(),
+                    hir_def::AdtId::UnionId(id) => id.into(),
+                }
+            }
+            _ => panic!("bad constructor {self:?} for adt {adt:?}"),
+        }
+    }
+
+    // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
+    // uninhabited fields in order not to reveal the uninhabitedness of the whole variant.
+    // This lists the fields we keep along with their types.
+    fn list_variant_nonhidden_fields<'a>(
+        &'a self,
+        ty: &'a Ty,
+        variant: VariantId,
+    ) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'p> {
+        let cx = self;
+        let (adt, substs) = ty.as_adt().unwrap();
+
+        let adt_is_local = variant.module(cx.db.upcast()).krate() == cx.module.krate();
+
+        // Whether we must not match the fields of this variant exhaustively.
+        let is_non_exhaustive =
+            cx.db.attrs(variant.into()).by_key("non_exhaustive").exists() && !adt_is_local;
+
+        let visibility = cx.db.field_visibilities(variant);
+        let field_ty = cx.db.field_types(variant);
+        let fields_len = variant.variant_data(cx.db.upcast()).fields().len() as u32;
+
+        (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).filter_map(move |fid| {
+            let ty = field_ty[fid].clone().substitute(Interner, substs);
+            let ty = normalize(cx.db, cx.db.trait_environment_for_body(cx.body), ty);
+            let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
+                || visibility[fid].is_visible_from(cx.db.upcast(), cx.module);
+            let is_uninhabited = cx.is_uninhabited(&ty);
+
+            if is_uninhabited && (!is_visible || is_non_exhaustive) {
+                None
+            } else {
+                Some((fid, ty))
+            }
+        })
+    }
+
+    pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> {
+        let singleton = |pat| std::slice::from_ref(self.pattern_arena.alloc(pat));
+        let ctor;
+        let fields: &[_];
+
+        match pat.kind.as_ref() {
+            PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat),
+            PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
+                ctor = Wildcard;
+                fields = &[];
+            }
+            PatKind::Deref { subpattern } => {
+                ctor = match pat.ty.kind(Interner) {
+                    // This is a box pattern.
+                    TyKind::Adt(adt, _) if is_box(self.db, adt.0) => Struct,
+                    TyKind::Ref(..) => Ref,
+                    _ => {
+                        never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
+                        Wildcard
+                    }
+                };
+                fields = singleton(self.lower_pat(subpattern));
+            }
+            PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
+                match pat.ty.kind(Interner) {
+                    TyKind::Tuple(_, substs) => {
+                        ctor = Struct;
+                        let mut wilds: SmallVec<[_; 2]> = substs
+                            .iter(Interner)
+                            .map(|arg| arg.assert_ty_ref(Interner).clone())
+                            .map(DeconstructedPat::wildcard)
+                            .collect();
+                        for pat in subpatterns {
+                            let idx: u32 = pat.field.into_raw().into();
+                            wilds[idx as usize] = self.lower_pat(&pat.pattern);
+                        }
+                        fields = self.pattern_arena.alloc_extend(wilds)
+                    }
+                    TyKind::Adt(adt, substs) if is_box(self.db, adt.0) => {
+                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
+                        // patterns. If we're here we can assume this is a box pattern.
+                        // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
+                        // _)` or a box pattern. As a hack to avoid an ICE with the former, we
+                        // ignore other fields than the first one. This will trigger an error later
+                        // anyway.
+                        // See https://github.com/rust-lang/rust/issues/82772 ,
+                        // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
+                        // The problem is that we can't know from the type whether we'll match
+                        // normally or through box-patterns. We'll have to figure out a proper
+                        // solution when we introduce generalized deref patterns. Also need to
+                        // prevent mixing of those two options.
+                        let pat =
+                            subpatterns.iter().find(|pat| pat.field.into_raw() == 0u32.into());
+                        let field = if let Some(pat) = pat {
+                            self.lower_pat(&pat.pattern)
+                        } else {
+                            let ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+                            DeconstructedPat::wildcard(ty)
+                        };
+                        ctor = Struct;
+                        fields = singleton(field);
+                    }
+                    &TyKind::Adt(adt, _) => {
+                        ctor = match pat.kind.as_ref() {
+                            PatKind::Leaf { .. } if matches!(adt.0, hir_def::AdtId::UnionId(_)) => {
+                                UnionField
+                            }
+                            PatKind::Leaf { .. } => Struct,
+                            PatKind::Variant { enum_variant, .. } => Variant(*enum_variant),
+                            _ => {
+                                never!();
+                                Wildcard
+                            }
+                        };
+                        let variant = self.variant_id_for_adt(&ctor, adt.0);
+                        let fields_len = variant.variant_data(self.db.upcast()).fields().len();
+                        // For each field in the variant, we store the relevant index into `self.fields` if any.
+                        let mut field_id_to_id: Vec<Option<usize>> = vec![None; fields_len];
+                        let tys = self
+                            .list_variant_nonhidden_fields(&pat.ty, variant)
+                            .enumerate()
+                            .map(|(i, (fid, ty))| {
+                                let field_idx: u32 = fid.into_raw().into();
+                                field_id_to_id[field_idx as usize] = Some(i);
+                                ty
+                            });
+                        let mut wilds: SmallVec<[_; 2]> =
+                            tys.map(DeconstructedPat::wildcard).collect();
+                        for pat in subpatterns {
+                            let field_idx: u32 = pat.field.into_raw().into();
+                            if let Some(i) = field_id_to_id[field_idx as usize] {
+                                wilds[i] = self.lower_pat(&pat.pattern);
+                            }
+                        }
+                        fields = self.pattern_arena.alloc_extend(wilds);
+                    }
+                    _ => {
+                        never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
+                        ctor = Wildcard;
+                        fields = &[];
+                    }
+                }
+            }
+            &PatKind::LiteralBool { value } => {
+                ctor = Bool(value);
+                fields = &[];
+            }
+            PatKind::Or { pats } => {
+                ctor = Or;
+                // Collect here because `Arena::alloc_extend` panics on reentrancy.
+                let subpats: SmallVec<[_; 2]> =
+                    pats.iter().map(|pat| self.lower_pat(pat)).collect();
+                fields = self.pattern_arena.alloc_extend(subpats);
+            }
+        }
+        DeconstructedPat::new(ctor, fields, pat.ty.clone(), ())
+    }
+
+    pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'p>) -> Pat {
+        let mut subpatterns = pat.iter_fields().map(|p| self.hoist_witness_pat(p));
+        let kind = match pat.ctor() {
+            &Bool(value) => PatKind::LiteralBool { value },
+            IntRange(_) => unimplemented!(),
+            Struct | Variant(_) | UnionField => match pat.ty().kind(Interner) {
+                TyKind::Tuple(..) => PatKind::Leaf {
+                    subpatterns: subpatterns
+                        .zip(0u32..)
+                        .map(|(p, i)| FieldPat {
+                            field: LocalFieldId::from_raw(i.into()),
+                            pattern: p,
+                        })
+                        .collect(),
+                },
+                TyKind::Adt(adt, _) if is_box(self.db, adt.0) => {
+                    // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
+                    // of `std`). So this branch is only reachable when the feature is enabled and
+                    // the pattern is a box pattern.
+                    PatKind::Deref { subpattern: subpatterns.next().unwrap() }
+                }
+                TyKind::Adt(adt, substs) => {
+                    let variant = self.variant_id_for_adt(pat.ctor(), adt.0);
+                    let subpatterns = self
+                        .list_variant_nonhidden_fields(pat.ty(), variant)
+                        .zip(subpatterns)
+                        .map(|((field, _ty), pattern)| FieldPat { field, pattern })
+                        .collect();
+
+                    if let VariantId::EnumVariantId(enum_variant) = variant {
+                        PatKind::Variant { substs: substs.clone(), enum_variant, subpatterns }
+                    } else {
+                        PatKind::Leaf { subpatterns }
+                    }
+                }
+                _ => {
+                    never!("unexpected ctor for type {:?} {:?}", pat.ctor(), pat.ty());
+                    PatKind::Wild
+                }
+            },
+            // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
+            // be careful to reconstruct the correct constant pattern here. However a string
+            // literal pattern will never be reported as a non-exhaustiveness witness, so we
+            // ignore this issue.
+            Ref => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
+            Slice(_) => unimplemented!(),
+            &Str(void) => match void {},
+            Wildcard | NonExhaustive | Hidden => PatKind::Wild,
+            Missing | F32Range(..) | F64Range(..) | Opaque(..) | Or => {
+                never!("can't convert to pattern: {:?}", pat.ctor());
+                PatKind::Wild
+            }
+        };
+        Pat { ty: pat.ty().clone(), kind: Box::new(kind) }
+    }
+}
+
+impl<'p> TypeCx for MatchCheckCtx<'p> {
+    type Error = Void;
+    type Ty = Ty;
+    type VariantIdx = EnumVariantId;
+    type StrLit = Void;
+    type ArmData = ();
+    type PatData = ();
+
+    fn is_exhaustive_patterns_feature_on(&self) -> bool {
+        self.exhaustive_patterns
+    }
+
+    fn ctor_arity(
+        &self,
+        ctor: &rustc_pattern_analysis::constructor::Constructor<Self>,
+        ty: &Self::Ty,
+    ) -> usize {
+        match ctor {
+            Struct | Variant(_) | UnionField => match *ty.kind(Interner) {
+                TyKind::Tuple(arity, ..) => arity,
+                TyKind::Adt(AdtId(adt), ..) => {
+                    if is_box(self.db, adt) {
+                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
+                        // patterns. If we're here we can assume this is a box pattern.
+                        1
+                    } else {
+                        let variant = self.variant_id_for_adt(ctor, adt);
+                        self.list_variant_nonhidden_fields(ty, variant).count()
+                    }
+                }
+                _ => {
+                    never!("Unexpected type for `Single` constructor: {:?}", ty);
+                    0
+                }
+            },
+            Ref => 1,
+            Slice(..) => unimplemented!(),
+            Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
+            | NonExhaustive | Hidden | Missing | Wildcard => 0,
+            Or => {
+                never!("The `Or` constructor doesn't have a fixed arity");
+                0
+            }
+        }
+    }
+
+    fn ctor_sub_tys(
+        &self,
+        ctor: &rustc_pattern_analysis::constructor::Constructor<Self>,
+        ty: &Self::Ty,
+    ) -> &[Self::Ty] {
+        use std::iter::once;
+        fn alloc<'a>(cx: &'a MatchCheckCtx<'_>, iter: impl Iterator<Item = Ty>) -> &'a [Ty] {
+            cx.ty_arena.alloc_extend(iter)
+        }
+        match ctor {
+            Struct | Variant(_) | UnionField => match ty.kind(Interner) {
+                TyKind::Tuple(_, substs) => {
+                    let tys = substs.iter(Interner).map(|ty| ty.assert_ty_ref(Interner));
+                    alloc(self, tys.cloned())
+                }
+                TyKind::Ref(.., rty) => alloc(self, once(rty.clone())),
+                &TyKind::Adt(AdtId(adt), ref substs) => {
+                    if is_box(self.db, adt) {
+                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
+                        // patterns. If we're here we can assume this is a box pattern.
+                        let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+                        alloc(self, once(subst_ty))
+                    } else {
+                        let variant = self.variant_id_for_adt(ctor, adt);
+                        let tys = self.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty);
+                        alloc(self, tys)
+                    }
+                }
+                ty_kind => {
+                    never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind);
+                    alloc(self, once(ty.clone()))
+                }
+            },
+            Ref => match ty.kind(Interner) {
+                TyKind::Ref(.., rty) => alloc(self, once(rty.clone())),
+                ty_kind => {
+                    never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind);
+                    alloc(self, once(ty.clone()))
+                }
+            },
+            Slice(_) => unreachable!("Found a `Slice` constructor in match checking"),
+            Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
+            | NonExhaustive | Hidden | Missing | Wildcard => &[],
+            Or => {
+                never!("called `Fields::wildcards` on an `Or` ctor");
+                &[]
+            }
+        }
+    }
+
+    fn ctors_for_ty(
+        &self,
+        ty: &Self::Ty,
+    ) -> Result<rustc_pattern_analysis::constructor::ConstructorSet<Self>, Self::Error> {
+        let cx = self;
+
+        // Unhandled types are treated as non-exhaustive. Being explicit here instead of falling
+        // to catchall arm to ease further implementation.
+        let unhandled = || ConstructorSet::Unlistable;
+
+        // This determines the set of all possible constructors for the type `ty`. For numbers,
+        // arrays and slices we use ranges and variable-length slices when appropriate.
+        //
+        // If the `exhaustive_patterns` feature is enabled, we make sure to omit constructors that
+        // are statically impossible. E.g., for `Option<!>`, we do not include `Some(_)` in the
+        // returned list of constructors.
+        // Invariant: this is empty if and only if the type is uninhabited (as determined by
+        // `cx.is_uninhabited()`).
+        Ok(match ty.kind(Interner) {
+            TyKind::Scalar(Scalar::Bool) => ConstructorSet::Bool,
+            TyKind::Scalar(Scalar::Char) => unhandled(),
+            TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(),
+            TyKind::Array(..) | TyKind::Slice(..) => unhandled(),
+            TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), subst) => {
+                let enum_data = cx.db.enum_data(*enum_id);
+                let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty);
+
+                if enum_data.variants.is_empty() && !is_declared_nonexhaustive {
+                    ConstructorSet::NoConstructors
+                } else {
+                    let mut variants = FxHashMap::default();
+                    for &(variant, _) in enum_data.variants.iter() {
+                        let is_uninhabited =
+                            is_enum_variant_uninhabited_from(variant, subst, cx.module, cx.db);
+                        let visibility = if is_uninhabited {
+                            VariantVisibility::Empty
+                        } else {
+                            VariantVisibility::Visible
+                        };
+                        variants.insert(variant, visibility);
+                    }
+
+                    ConstructorSet::Variants {
+                        variants: IdxContainer(variants),
+                        non_exhaustive: is_declared_nonexhaustive,
+                    }
+                }
+            }
+            TyKind::Adt(AdtId(hir_def::AdtId::UnionId(_)), _) => ConstructorSet::Union,
+            TyKind::Adt(..) | TyKind::Tuple(..) => {
+                ConstructorSet::Struct { empty: cx.is_uninhabited(ty) }
+            }
+            TyKind::Ref(..) => ConstructorSet::Ref,
+            TyKind::Never => ConstructorSet::NoConstructors,
+            // This type is one for which we cannot list constructors, like `str` or `f64`.
+            _ => ConstructorSet::Unlistable,
+        })
+    }
+
+    fn debug_pat(
+        _f: &mut fmt::Formatter<'_>,
+        _pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>,
+    ) -> fmt::Result {
+        // FIXME: implement this, as using `unimplemented!()` causes panics in `tracing`.
+        Ok(())
+    }
+
+    fn bug(&self, fmt: fmt::Arguments<'_>) -> ! {
+        panic!("{}", fmt)
+    }
+}
+
+impl<'p> fmt::Debug for MatchCheckCtx<'p> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MatchCheckCtx").finish()
+    }
+}
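A note on the two arenas threaded through `MatchCheckCtx` above (an illustration, not part of the commit): `typed_arena::Arena` hands out references that live as long as the arena itself, which is what lets `DeconstructedPat<'p>` values and the `'p` slices returned by `ctor_sub_tys` borrow from `pattern_arena` and `ty_arena` without reference counting. The core lifetime shape, reduced to a few lines:

    use typed_arena::Arena; // the same crate the new module uses

    struct Ctx<'p> {
        arena: &'p Arena<String>,
    }

    impl<'p> Ctx<'p> {
        // The returned reference borrows from the arena, not from `self`,
        // so it outlives this call and stays valid for the whole analysis.
        fn intern(&self, s: &str) -> &'p String {
            self.arena.alloc(s.to_owned())
        }
    }

    fn main() {
        let arena = Arena::new();
        let cx = Ctx { arena: &arena };
        let a = cx.intern("pattern");
        let b = cx.intern("type");
        println!("{a} {b}"); // both references live as long as `arena`
    }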
@@ -1,824 +0,0 @@
-//! Based on rust-lang/rust (last sync f31622a50 2021-11-12)
-//! <https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs>
-//!
-//! -----
-//!
-//! This file includes the logic for exhaustiveness and reachability checking for pattern-matching.
-//! Specifically, given a list of patterns for a type, we can tell whether:
-//! (a) each pattern is reachable (reachability)
-//! (b) the patterns cover every possible value for the type (exhaustiveness)
-//!
-//! The algorithm implemented here is a modified version of the one described in [this
-//! paper](http://moscova.inria.fr/~maranget/papers/warn/index.html). We have however generalized
-//! it to accommodate the variety of patterns that Rust supports. We thus explain our version here,
-//! without being as rigorous.
-//!
-//!
-//! # Summary
-//!
-//! The core of the algorithm is the notion of "usefulness". A pattern `q` is said to be *useful*
-//! relative to another pattern `p` of the same type if there is a value that is matched by `q` and
-//! not matched by `p`. This generalizes to many `p`s: `q` is useful w.r.t. a list of patterns
-//! `p_1 .. p_n` if there is a value that is matched by `q` and by none of the `p_i`. We write
-//! `usefulness(p_1 .. p_n, q)` for a function that returns a list of such values. The aim of this
-//! file is to compute it efficiently.
-//!
-//! This is enough to compute reachability: a pattern in a `match` expression is reachable iff it
-//! is useful w.r.t. the patterns above it:
-//! ```rust
-//! match x {
-//!     Some(_) => ...,
-//!     None => ..., // reachable: `None` is matched by this but not the branch above
-//!     Some(0) => ..., // unreachable: all the values this matches are already matched by
-//!                     // `Some(_)` above
-//! }
-//! ```
-//!
-//! This is also enough to compute exhaustiveness: a match is exhaustive iff the wildcard `_`
-//! pattern is _not_ useful w.r.t. the patterns in the match. The values returned by `usefulness`
-//! are used to tell the user which values are missing.
-//! ```rust
-//! match x {
-//!     Some(0) => ...,
-//!     None => ...,
-//!     // not exhaustive: `_` is useful because it matches `Some(1)`
-//! }
-//! ```
-//!
-//! The entrypoint of this file is the [`compute_match_usefulness`] function, which computes
-//! reachability for each match branch and exhaustiveness for the whole match.
-//!
-//!
-//! # Constructors and fields
-//!
-//! Note: we will often abbreviate "constructor" as "ctor".
-//!
-//! The idea that powers everything that is done in this file is the following: a (matcheable)
-//! value is made from a constructor applied to a number of subvalues. Examples of constructors are
-//! `Some`, `None`, `(,)` (the 2-tuple constructor), `Foo {..}` (the constructor for a struct
-//! `Foo`), and `2` (the constructor for the number `2`). This is natural when we think of
-//! pattern-matching, and this is the basis for what follows.
-//!
-//! Some of the ctors listed above might feel weird: `None` and `2` don't take any arguments.
-//! That's ok: those are ctors that take a list of 0 arguments; they are the simplest case of
-//! ctors. We treat `2` as a ctor because `u64` and other number types behave exactly like a huge
-//! `enum`, with one variant for each number. This allows us to see any matcheable value as made up
-//! from a tree of ctors, each having a set number of children. For example: `Foo { bar: None,
-//! baz: Ok(0) }` is made from 4 different ctors, namely `Foo{..}`, `None`, `Ok` and `0`.
-//!
-//! This idea can be extended to patterns: they are also made from constructors applied to fields.
-//! A pattern for a given type is allowed to use all the ctors for values of that type (which we
-//! call "value constructors"), but there are also pattern-only ctors. The most important one is
-//! the wildcard (`_`), and the others are integer ranges (`0..=10`), variable-length slices (`[x,
-//! ..]`), and or-patterns (`Ok(0) | Err(_)`). Examples of valid patterns are `42`, `Some(_)`, `Foo
-//! { bar: Some(0) | None, baz: _ }`. Note that a binder in a pattern (e.g. `Some(x)`) matches the
-//! same values as a wildcard (e.g. `Some(_)`), so we treat both as wildcards.
-//!
-//! From this deconstruction we can compute whether a given value matches a given pattern; we
-//! simply look at ctors one at a time. Given a pattern `p` and a value `v`, we want to compute
-//! `matches!(v, p)`. It's mostly straightforward: we compare the head ctors and when they match
-//! we compare their fields recursively. A few representative examples:
-//!
-//! - `matches!(v, _) := true`
-//! - `matches!((v0, v1), (p0, p1)) := matches!(v0, p0) && matches!(v1, p1)`
-//! - `matches!(Foo { bar: v0, baz: v1 }, Foo { bar: p0, baz: p1 }) := matches!(v0, p0) && matches!(v1, p1)`
-//! - `matches!(Ok(v0), Ok(p0)) := matches!(v0, p0)`
-//! - `matches!(Ok(v0), Err(p0)) := false` (incompatible variants)
-//! - `matches!(v, 1..=100) := matches!(v, 1) || ... || matches!(v, 100)`
-//! - `matches!([v0], [p0, .., p1]) := false` (incompatible lengths)
-//! - `matches!([v0, v1, v2], [p0, .., p1]) := matches!(v0, p0) && matches!(v2, p1)`
-//! - `matches!(v, p0 | p1) := matches!(v, p0) || matches!(v, p1)`
-//!
-//! Constructors, fields and relevant operations are defined in the [`super::deconstruct_pat`] module.
-//!
-//! Note: this constructors/fields distinction may not straightforwardly apply to every Rust type.
-//! For example a value of type `Rc<u64>` can't be deconstructed that way, and `&str` has an
-//! infinitude of constructors. There are also subtleties with visibility of fields and
-//! uninhabitedness and various other things. The constructors idea can be extended to handle most
-//! of these subtleties though; caveats are documented where relevant throughout the code.
-//!
-//! Whether constructors cover each other is computed by [`Constructor::is_covered_by`].
-//!
-//!
-//! # Specialization
-//!
-//! Recall that we wish to compute `usefulness(p_1 .. p_n, q)`: given a list of patterns `p_1 ..
-//! p_n` and a pattern `q`, all of the same type, we want to find a list of values (called
-//! "witnesses") that are matched by `q` and by none of the `p_i`. We obviously don't just
-//! enumerate all possible values. From the discussion above we see that we can proceed
-//! ctor-by-ctor: for each value ctor of the given type, we ask "is there a value that starts with
-//! this constructor and matches `q` and none of the `p_i`?". As we saw above, there's a lot we can
-//! say from knowing only the first constructor of our candidate value.
-//!
-//! Let's take the following example:
-//! ```
-//! match x {
-//!     Enum::Variant1(_) => {} // `p1`
-//!     Enum::Variant2(None, 0) => {} // `p2`
-//!     Enum::Variant2(Some(_), 0) => {} // `q`
-//! }
-//! ```
-//!
-//! We can easily see that if our candidate value `v` starts with `Variant1` it will not match `q`.
-//! If `v = Variant2(v0, v1)` however, whether or not it matches `p2` and `q` will depend on `v0`
-//! and `v1`. In fact, such a `v` will be a witness of usefulness of `q` exactly when the tuple
-//! `(v0, v1)` is a witness of usefulness of `q'` in the following reduced match:
-//!
-//! ```
-//! match x {
-//!     (None, 0) => {} // `p2'`
-//!     (Some(_), 0) => {} // `q'`
-//! }
-//! ```
-//!
-//! This motivates a new step in computing usefulness, that we call _specialization_.
-//! Specialization consists of filtering a list of patterns for those that match a constructor, and
-//! then looking into the constructor's fields. This enables usefulness to be computed recursively.
-//!
-//! Instead of acting on a single pattern in each row, we will consider a list of patterns for each
-//! row, and we call such a list a _pattern-stack_. The idea is that we will specialize the
-//! leftmost pattern, which amounts to popping the constructor and pushing its fields, which feels
-//! like a stack. We note a pattern-stack simply with `[p_1 ... p_n]`.
-//! Here's a sequence of specializations of a list of pattern-stacks, to illustrate what's
-//! happening:
-//! ```
-//! [Enum::Variant1(_)]
-//! [Enum::Variant2(None, 0)]
-//! [Enum::Variant2(Some(_), 0)]
-//! //==>> specialize with `Variant2`
-//! [None, 0]
-//! [Some(_), 0]
-//! //==>> specialize with `Some`
-//! [_, 0]
-//! //==>> specialize with `true` (say the type was `bool`)
-//! [0]
-//! //==>> specialize with `0`
-//! []
-//! ```
-//!
-//! The function `specialize(c, p)` takes a value constructor `c` and a pattern `p`, and returns 0
-//! or more pattern-stacks. If `c` does not match the head constructor of `p`, it returns nothing;
-//! otherwise it returns the fields of the constructor. This only returns more than one
-//! pattern-stack if `p` has a pattern-only constructor.
-//!
-//! - Specializing for the wrong constructor returns nothing
-//!
-//!   `specialize(None, Some(p0)) := []`
-//!
-//! - Specializing for the correct constructor returns a single row with the fields
-//!
-//!   `specialize(Variant1, Variant1(p0, p1, p2)) := [[p0, p1, p2]]`
-//!
-//!   `specialize(Foo{..}, Foo { bar: p0, baz: p1 }) := [[p0, p1]]`
-//!
-//! - For or-patterns, we specialize each branch and concatenate the results
-//!
-//!   `specialize(c, p0 | p1) := specialize(c, p0) ++ specialize(c, p1)`
-//!
-//! - We treat the other pattern constructors as if they were a large or-pattern of all the
-//!   possibilities:
-//!
-//!   `specialize(c, _) := specialize(c, Variant1(_) | Variant2(_, _) | ...)`
-//!
-//!   `specialize(c, 1..=100) := specialize(c, 1 | ... | 100)`
-//!
-//!   `specialize(c, [p0, .., p1]) := specialize(c, [p0, p1] | [p0, _, p1] | [p0, _, _, p1] | ...)`
-//!
-//! - If `c` is a pattern-only constructor, `specialize` is defined on a case-by-case basis. See
-//!   the discussion about constructor splitting in [`super::deconstruct_pat`].
-//!
-//!
-//! We then extend this function to work with pattern-stacks as input, by acting on the first
-//! column and keeping the other columns untouched.
-//!
-//! Specialization for the whole matrix is done in [`Matrix::specialize_constructor`]. Note that
-//! or-patterns in the first column are expanded before being stored in the matrix. Specialization
-//! for a single patstack is done from a combination of [`Constructor::is_covered_by`] and
-//! [`PatStack::pop_head_constructor`]. The internals of how it's done mostly live in the
-//! [`Fields`] struct.
-//!
-//!
-//! # Computing usefulness
-//!
-//! We now have all we need to compute usefulness. The inputs to usefulness are a list of
-//! pattern-stacks `p_1 ... p_n` (one per row), and a new pattern_stack `q`. The paper and this
-//! file calls the list of patstacks a _matrix_. They must all have the same number of columns and
-//! the patterns in a given column must all have the same type. `usefulness` returns a (possibly
-//! empty) list of witnesses of usefulness. These witnesses will also be pattern-stacks.
-//!
-//! - base case: `n_columns == 0`.
-//!     Since a pattern-stack functions like a tuple of patterns, an empty one functions like the
-//!     unit type. Thus `q` is useful iff there are no rows above it, i.e. if `n == 0`.
-//!
-//! - inductive case: `n_columns > 0`.
-//!     We need a way to list the constructors we want to try. We will be more clever in the next
-//!     section but for now assume we list all value constructors for the type of the first column.
-//!
-//!     - for each such ctor `c`:
-//!
-//!         - for each `q'` returned by `specialize(c, q)`:
-//!
-//!             - we compute `usefulness(specialize(c, p_1) ... specialize(c, p_n), q')`
-//!
-//!         - for each witness found, we revert specialization by pushing the constructor `c` on top.
-//!
-//!     - We return the concatenation of all the witnesses found, if any.
-//!
-//! Example:
-//! ```
-//! [Some(true)] // p_1
-//! [None] // p_2
-//! [Some(_)] // q
-//! //==>> try `None`: `specialize(None, q)` returns nothing
-//! //==>> try `Some`: `specialize(Some, q)` returns a single row
-//! [true] // p_1'
-//! [_] // q'
-//! //==>> try `true`: `specialize(true, q')` returns a single row
-//! [] // p_1''
-//! [] // q''
-//! //==>> base case; `n != 0` so `q''` is not useful.
-//! //==>> go back up a step
-//! [true] // p_1'
-//! [_] // q'
-//! //==>> try `false`: `specialize(false, q')` returns a single row
-//! [] // q''
-//! //==>> base case; `n == 0` so `q''` is useful. We return the single witness `[]`
-//! witnesses:
-//! []
-//! //==>> undo the specialization with `false`
-//! witnesses:
-//! [false]
-//! //==>> undo the specialization with `Some`
-//! witnesses:
-//! [Some(false)]
-//! //==>> we have tried all the constructors. The output is the single witness `[Some(false)]`.
-//! ```
-//!
-//! This computation is done in [`is_useful`]. In practice we don't care about the list of
-//! witnesses when computing reachability; we only need to know whether any exist. We do keep the
-//! witnesses when computing exhaustiveness to report them to the user.
-//!
-//!
-//! # Making usefulness tractable: constructor splitting
-//!
-//! We're missing one last detail: which constructors do we list? Naively listing all value
-//! constructors cannot work for types like `u64` or `&str`, so we need to be more clever. The
-//! first obvious insight is that we only want to list constructors that are covered by the head
-//! constructor of `q`. If it's a value constructor, we only try that one. If it's a pattern-only
-//! constructor, we use the final clever idea for this algorithm: _constructor splitting_, where we
-//! group together constructors that behave the same.
-//!
-//! The details are not necessary to understand this file, so we explain them in
-//! [`super::deconstruct_pat`]. Splitting is done by the [`Constructor::split`] function.
-
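As a concrete anchor for the deleted documentation above (an example of my own, not from the diff): the witness the algorithm computes is exactly what rustc reports in a non-exhaustiveness error. For the arms below, `_` is useful and the witness is `Some(false)`:

    fn describe(x: Option<bool>) -> &'static str {
        match x {
            Some(true) => "yes",
            None => "nothing",
            // Deleting this arm makes the match non-exhaustive, and the
            // checker reports the missing pattern (witness) `Some(false)`.
            Some(false) => "no",
        }
    }

    fn main() {
        println!("{}", describe(Some(false))); // prints: no
    }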
use std::iter::once;
|
|
||||||
|
|
||||||
use hir_def::{AdtId, DefWithBodyId, HasModule, ModuleId};
|
|
||||||
use smallvec::{smallvec, SmallVec};
|
|
||||||
use typed_arena::Arena;
|
|
||||||
|
|
||||||
use crate::{db::HirDatabase, inhabitedness::is_ty_uninhabited_from, Ty, TyExt};
|
|
||||||
|
|
||||||
use super::deconstruct_pat::{Constructor, DeconstructedPat, Fields, SplitWildcard};
|
|
||||||
|
|
||||||
use self::{helper::Captures, ArmType::*, Usefulness::*};
|
|
||||||
|
|
||||||
pub(crate) struct MatchCheckCtx<'a, 'p> {
|
|
||||||
pub(crate) module: ModuleId,
|
|
||||||
pub(crate) body: DefWithBodyId,
|
|
||||||
pub(crate) db: &'a dyn HirDatabase,
|
|
||||||
/// Lowered patterns from arms plus generated by the check.
|
|
||||||
pub(crate) pattern_arena: &'p Arena<DeconstructedPat<'p>>,
|
|
||||||
exhaustive_patterns: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a, 'p> MatchCheckCtx<'a, 'p> {
|
|
||||||
pub(crate) fn new(
|
|
||||||
module: ModuleId,
|
|
||||||
body: DefWithBodyId,
|
|
||||||
db: &'a dyn HirDatabase,
|
|
||||||
pattern_arena: &'p Arena<DeconstructedPat<'p>>,
|
|
||||||
) -> Self {
|
|
||||||
let def_map = db.crate_def_map(module.krate());
|
|
||||||
let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns");
|
|
||||||
Self { module, body, db, pattern_arena, exhaustive_patterns }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(super) fn is_uninhabited(&self, ty: &Ty) -> bool {
|
|
||||||
if self.feature_exhaustive_patterns() {
|
|
||||||
is_ty_uninhabited_from(ty, self.module, self.db)
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
|
|
||||||
pub(super) fn is_foreign_non_exhaustive_enum(&self, ty: &Ty) -> bool {
|
|
||||||
match ty.as_adt() {
|
|
||||||
Some((adt @ AdtId::EnumId(_), _)) => {
|
|
||||||
let has_non_exhaustive_attr =
|
|
||||||
self.db.attrs(adt.into()).by_key("non_exhaustive").exists();
|
|
||||||
let is_local = adt.module(self.db.upcast()).krate() == self.module.krate();
|
|
||||||
has_non_exhaustive_attr && !is_local
|
|
||||||
}
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Rust's unstable feature described as "Allows exhaustive pattern matching on types that contain uninhabited types."
|
|
||||||
pub(super) fn feature_exhaustive_patterns(&self) -> bool {
|
|
||||||
self.exhaustive_patterns
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone)]
|
|
||||||
pub(super) struct PatCtxt<'a, 'p> {
|
|
||||||
pub(super) cx: &'a MatchCheckCtx<'a, 'p>,
|
|
||||||
/// Type of the current column under investigation.
|
|
||||||
pub(super) ty: &'a Ty,
|
|
||||||
/// Whether the current pattern is the whole pattern as found in a match arm, or if it's a
|
|
||||||
/// subpattern.
|
|
||||||
pub(super) is_top_level: bool,
|
|
||||||
/// Whether the current pattern is from a `non_exhaustive` enum.
|
|
||||||
pub(super) is_non_exhaustive: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec[_; 2]`
|
|
||||||
/// works well.
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub(super) struct PatStack<'p> {
|
|
||||||
pats: SmallVec<[&'p DeconstructedPat<'p>; 2]>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'p> PatStack<'p> {
|
|
||||||
fn from_pattern(pat: &'p DeconstructedPat<'p>) -> Self {
|
|
||||||
Self::from_vec(smallvec![pat])
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_vec(vec: SmallVec<[&'p DeconstructedPat<'p>; 2]>) -> Self {
|
|
||||||
PatStack { pats: vec }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn is_empty(&self) -> bool {
|
|
||||||
self.pats.is_empty()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn len(&self) -> usize {
|
|
||||||
self.pats.len()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn head(&self) -> &'p DeconstructedPat<'p> {
|
|
||||||
self.pats[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Recursively expand the first pattern into its subpatterns. Only useful if the pattern is an
|
|
||||||
// or-pattern. Panics if `self` is empty.
|
|
||||||
fn expand_or_pat(&self) -> impl Iterator<Item = PatStack<'p>> + Captures<'_> {
|
|
||||||
self.head().iter_fields().map(move |pat| {
|
|
||||||
let mut new_patstack = PatStack::from_pattern(pat);
|
|
||||||
new_patstack.pats.extend_from_slice(&self.pats[1..]);
|
|
||||||
new_patstack
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This computes `S(self.head().ctor(), self)`. See top of the file for explanations.
|
|
||||||
///
|
|
||||||
/// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
|
|
||||||
/// fields filled with wild patterns.
|
|
||||||
///
|
|
||||||
/// This is roughly the inverse of `Constructor::apply`.
|
|
||||||
fn pop_head_constructor(&self, cx: &MatchCheckCtx<'_, 'p>, ctor: &Constructor) -> PatStack<'p> {
|
|
||||||
// We pop the head pattern and push the new fields extracted from the arguments of
|
|
||||||
// `self.head()`.
|
|
||||||
let mut new_fields: SmallVec<[_; 2]> = self.head().specialize(cx, ctor);
|
|
||||||
new_fields.extend_from_slice(&self.pats[1..]);
|
|
||||||
PatStack::from_vec(new_fields)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A 2D matrix.
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub(super) struct Matrix<'p> {
|
|
||||||
patterns: Vec<PatStack<'p>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'p> Matrix<'p> {
|
|
||||||
fn empty() -> Self {
|
|
||||||
Matrix { patterns: vec![] }
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Number of columns of this matrix. `None` is the matrix is empty.
|
|
||||||
pub(super) fn _column_count(&self) -> Option<usize> {
|
|
||||||
self.patterns.first().map(|r| r.len())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively
|
|
||||||
/// expands it.
|
|
||||||
fn push(&mut self, row: PatStack<'p>) {
|
|
||||||
if !row.is_empty() && row.head().is_or_pat() {
|
|
||||||
self.patterns.extend(row.expand_or_pat());
|
|
||||||
} else {
|
|
||||||
self.patterns.push(row);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over the first component of each row
|
|
||||||
fn heads(&self) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + Clone + Captures<'_> {
|
|
||||||
self.patterns.iter().map(|r| r.head())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This computes `S(constructor, self)`. See top of the file for explanations.
|
|
||||||
fn specialize_constructor(&self, pcx: PatCtxt<'_, 'p>, ctor: &Constructor) -> Matrix<'p> {
|
|
||||||
let mut matrix = Matrix::empty();
|
|
||||||
for row in &self.patterns {
|
|
||||||
if ctor.is_covered_by(pcx, row.head().ctor()) {
|
|
||||||
let new_row = row.pop_head_constructor(pcx.cx, ctor);
|
|
||||||
matrix.push(new_row);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
matrix
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This carries the results of computing usefulness, as described at the top of the file. When
|
|
||||||
/// checking usefulness of a match branch, we use the `NoWitnesses` variant, which also keeps track
|
|
||||||
/// of potential unreachable sub-patterns (in the presence of or-patterns). When checking
|
|
||||||
/// exhaustiveness of a whole match, we use the `WithWitnesses` variant, which carries a list of
|
|
||||||
/// witnesses of non-exhaustiveness when there are any.
|
|
||||||
/// Which variant to use is dictated by `ArmType`.
|
|
||||||
enum Usefulness<'p> {
|
|
||||||
/// If we don't care about witnesses, simply remember if the pattern was useful.
|
|
||||||
NoWitnesses { useful: bool },
|
|
||||||
/// Carries a list of witnesses of non-exhaustiveness. If empty, indicates that the whole
|
|
||||||
/// pattern is unreachable.
|
|
||||||
WithWitnesses(Vec<Witness<'p>>),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'p> Usefulness<'p> {
|
|
||||||
fn new_useful(preference: ArmType) -> Self {
|
|
||||||
match preference {
|
|
||||||
// A single (empty) witness of reachability.
|
|
||||||
FakeExtraWildcard => WithWitnesses(vec![Witness(vec![])]),
|
|
||||||
RealArm => NoWitnesses { useful: true },
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fn new_not_useful(preference: ArmType) -> Self {
|
|
||||||
match preference {
|
|
||||||
FakeExtraWildcard => WithWitnesses(vec![]),
|
|
||||||
RealArm => NoWitnesses { useful: false },
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn is_useful(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
Usefulness::NoWitnesses { useful } => *useful,
|
|
||||||
Usefulness::WithWitnesses(witnesses) => !witnesses.is_empty(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Combine usefulnesses from two branches. This is an associative operation.
|
|
||||||
fn extend(&mut self, other: Self) {
|
|
||||||
match (&mut *self, other) {
|
|
||||||
(WithWitnesses(_), WithWitnesses(o)) if o.is_empty() => {}
|
|
||||||
(WithWitnesses(s), WithWitnesses(o)) if s.is_empty() => *self = WithWitnesses(o),
|
|
||||||
(WithWitnesses(s), WithWitnesses(o)) => s.extend(o),
|
|
||||||
(NoWitnesses { useful: s_useful }, NoWitnesses { useful: o_useful }) => {
|
|
||||||
*s_useful = *s_useful || o_useful
|
|
||||||
}
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// After calculating usefulness after a specialization, call this to reconstruct a usefulness
|
|
||||||
/// that makes sense for the matrix pre-specialization. This new usefulness can then be merged
|
|
||||||
/// with the results of specializing with the other constructors.
|
|
||||||
fn apply_constructor(
|
|
||||||
self,
|
|
||||||
pcx: PatCtxt<'_, 'p>,
|
|
||||||
matrix: &Matrix<'p>,
|
|
||||||
ctor: &Constructor,
|
|
||||||
) -> Self {
|
|
||||||
match self {
|
|
||||||
NoWitnesses { .. } => self,
|
|
||||||
WithWitnesses(ref witnesses) if witnesses.is_empty() => self,
|
|
||||||
WithWitnesses(witnesses) => {
|
|
||||||
let new_witnesses = if let Constructor::Missing { .. } = ctor {
|
|
||||||
// We got the special `Missing` constructor, so each of the missing constructors
|
|
||||||
// gives a new pattern that is not caught by the match. We list those patterns.
|
|
||||||
let new_patterns = if pcx.is_non_exhaustive {
|
|
||||||
// Here we don't want the user to try to list all variants, we want them to add
|
|
||||||
// a wildcard, so we only suggest that.
|
|
||||||
vec![DeconstructedPat::wildcard(pcx.ty.clone())]
|
|
||||||
} else {
|
|
||||||
let mut split_wildcard = SplitWildcard::new(pcx);
|
|
||||||
split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
|
|
||||||
|
|
||||||
// This lets us know if we skipped any variants because they are marked
|
|
||||||
// `doc(hidden)` or they are unstable feature gate (only stdlib types).
|
|
||||||
let mut hide_variant_show_wild = false;
|
|
||||||
// Construct for each missing constructor a "wild" version of this
|
|
||||||
                        // constructor, that matches everything that can be built with
                        // it. For example, if `ctor` is a `Constructor::Variant` for
                        // `Option::Some`, we get the pattern `Some(_)`.
                        let mut new: Vec<DeconstructedPat<'_>> = split_wildcard
                            .iter_missing(pcx)
                            .filter_map(|missing_ctor| {
                                // Check if this variant is marked `doc(hidden)`
                                if missing_ctor.is_doc_hidden_variant(pcx)
                                    || missing_ctor.is_unstable_variant(pcx)
                                {
                                    hide_variant_show_wild = true;
                                    return None;
                                }
                                Some(DeconstructedPat::wild_from_ctor(pcx, missing_ctor.clone()))
                            })
                            .collect();

                        if hide_variant_show_wild {
                            new.push(DeconstructedPat::wildcard(pcx.ty.clone()))
                        }

                        new
                    };

                    witnesses
                        .into_iter()
                        .flat_map(|witness| {
                            new_patterns.iter().map(move |pat| {
                                Witness(
                                    witness
                                        .0
                                        .iter()
                                        .chain(once(pat))
                                        .map(DeconstructedPat::clone_and_forget_reachability)
                                        .collect(),
                                )
                            })
                        })
                        .collect()
                } else {
                    witnesses
                        .into_iter()
                        .map(|witness| witness.apply_constructor(pcx, ctor))
                        .collect()
                };
                WithWitnesses(new_witnesses)
            }
        }
    }
}

#[derive(Copy, Clone, Debug)]
enum ArmType {
    FakeExtraWildcard,
    RealArm,
}

/// A witness of non-exhaustiveness for error reporting, represented
/// as a list of patterns (in reverse order of construction) with
/// wildcards inside to represent elements that can take any inhabitant
/// of the type as a value.
///
/// A witness against a list of patterns should have the same types
/// and length as the pattern matched against. Because Rust `match`
/// is always against a single pattern, at the end the witness will
/// have length 1, but in the middle of the algorithm, it can contain
/// multiple patterns.
///
/// For example, if we are constructing a witness for the match against
///
/// ```
/// struct Pair(Option<(u32, u32)>, bool);
///
/// match (p: Pair) {
///     Pair(None, _) => {}
///     Pair(_, false) => {}
/// }
/// ```
///
/// We'll perform the following steps:
/// 1. Start with an empty witness
///     `Witness(vec![])`
/// 2. Push a witness `true` against the `false`
///     `Witness(vec![true])`
/// 3. Push a witness `Some(_)` against the `None`
///     `Witness(vec![true, Some(_)])`
/// 4. Apply the `Pair` constructor to the witnesses
///     `Witness(vec![Pair(Some(_), true)])`
///
/// The final `Pair(Some(_), true)` is then the resulting witness.
pub(crate) struct Witness<'p>(Vec<DeconstructedPat<'p>>);

impl<'p> Witness<'p> {
    /// Asserts that the witness contains a single pattern, and returns it.
    fn single_pattern(self) -> DeconstructedPat<'p> {
        assert_eq!(self.0.len(), 1);
        self.0.into_iter().next().unwrap()
    }

    /// Constructs a partial witness for a pattern given a list of
    /// patterns expanded by the specialization step.
    ///
    /// When a pattern P is discovered to be useful, this function is used bottom-up
    /// to reconstruct a complete witness, e.g., a pattern P' that covers a subset
    /// of values, V, where each value in that set is not covered by any previously
    /// used patterns and is covered by the pattern P'. Examples:
    ///
    /// left_ty: tuple of 3 elements
    /// pats: [10, 20, _]           => (10, 20, _)
    ///
    /// left_ty: struct X { a: (bool, &'static str), b: usize }
    /// pats: [(false, "foo"), 42]  => X { a: (false, "foo"), b: 42 }
    fn apply_constructor(mut self, pcx: PatCtxt<'_, 'p>, ctor: &Constructor) -> Self {
        let pat = {
            let len = self.0.len();
            let arity = ctor.arity(pcx);
            let pats = self.0.drain((len - arity)..).rev();
            let fields = Fields::from_iter(pcx.cx, pats);
            DeconstructedPat::new(ctor.clone(), fields, pcx.ty.clone())
        };

        self.0.push(pat);

        self
    }
}
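
// --- Illustrative sketch (editor's addition, not part of the original source). ---
// The drain/rev step in `apply_constructor` above, shown on a plain `Vec<&str>`
// so it can run standalone. The pattern strings mirror the `Pair` example from
// the doc comment; everything here is hypothetical stand-in data.
#[cfg(test)]
mod apply_constructor_sketch {
    #[test]
    fn drain_matches_constructor_arity() {
        // Partial witnesses are pushed innermost-last: `true`, then `Some(_)`.
        let mut witness = vec!["true", "Some(_)"];
        let arity = 2; // `Pair` has two fields
        let len = witness.len();
        // Take the last `arity` entries and restore field order by reversing.
        let fields: Vec<_> = witness.drain((len - arity)..).rev().collect();
        assert_eq!(fields, ["Some(_)", "true"]);
        // The reassembled pattern replaces the consumed entries.
        witness.push("Pair(Some(_), true)");
        assert_eq!(witness, ["Pair(Some(_), true)"]);
    }
}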

/// Algorithm from <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
/// The algorithm from the paper has been modified to correctly handle empty
/// types. The changes are:
///   (0) We don't exit early if the pattern matrix has zero rows. We just
///       continue to recurse over columns.
///   (1) all_constructors will only return constructors that are statically
///       possible. E.g., it will only return `Ok` for `Result<T, !>`.
///
/// This finds whether a (row) vector `v` of patterns is 'useful' in relation
/// to a set of such vectors `m` - this is defined as there being a set of
/// inputs that will match `v` but not any of the sets in `m`.
///
/// All the patterns at each column of the `matrix ++ v` matrix must have the same type.
///
/// This is used both for reachability checking (if a pattern isn't useful in
/// relation to preceding patterns, it is not reachable) and exhaustiveness
/// checking (if a wildcard pattern is useful in relation to a matrix, the
/// matrix isn't exhaustive).
///
/// `is_under_guard` is used to inform if the pattern has a guard. If it
/// has one it must not be inserted into the matrix. This shouldn't be
/// relied on for soundness.
fn is_useful<'p>(
    cx: &MatchCheckCtx<'_, 'p>,
    matrix: &Matrix<'p>,
    v: &PatStack<'p>,
    witness_preference: ArmType,
    is_under_guard: bool,
    is_top_level: bool,
) -> Usefulness<'p> {
    let Matrix { patterns: rows, .. } = matrix;

    // The base case. We are pattern-matching on () and the return value is
    // based on whether our matrix has a row or not.
    // NOTE: This could potentially be optimized by checking rows.is_empty()
    // first and then, if v is non-empty, the return value is based on whether
    // the type of the tuple we're checking is inhabited or not.
    if v.is_empty() {
        let ret = if rows.is_empty() {
            Usefulness::new_useful(witness_preference)
        } else {
            Usefulness::new_not_useful(witness_preference)
        };
        return ret;
    }

    debug_assert!(rows.iter().all(|r| r.len() == v.len()));

    let ty = v.head().ty();
    let is_non_exhaustive = cx.is_foreign_non_exhaustive_enum(ty);
    let pcx = PatCtxt { cx, ty, is_top_level, is_non_exhaustive };

    // If the first pattern is an or-pattern, expand it.
    let mut ret = Usefulness::new_not_useful(witness_preference);
    if v.head().is_or_pat() {
        // We try each or-pattern branch in turn.
        let mut matrix = matrix.clone();
        for v in v.expand_or_pat() {
            let usefulness = is_useful(cx, &matrix, &v, witness_preference, is_under_guard, false);
            ret.extend(usefulness);
            // If pattern has a guard don't add it to the matrix.
            if !is_under_guard {
                // We push the already-seen patterns into the matrix in order to detect redundant
                // branches like `Some(_) | Some(0)`.
                matrix.push(v);
            }
        }
    } else {
        let v_ctor = v.head().ctor();

        // FIXME: implement `overlapping_range_endpoints` lint

        // We split the head constructor of `v`.
        let split_ctors = v_ctor.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
        // For each constructor, we compute whether there's a value that starts with it that would
        // witness the usefulness of `v`.
        let start_matrix = matrix;
        for ctor in split_ctors {
            // We cache the result of `Fields::wildcards` because it is used a lot.
            let spec_matrix = start_matrix.specialize_constructor(pcx, &ctor);
            let v = v.pop_head_constructor(cx, &ctor);
            let usefulness =
                is_useful(cx, &spec_matrix, &v, witness_preference, is_under_guard, false);
            let usefulness = usefulness.apply_constructor(pcx, start_matrix, &ctor);

            // FIXME: implement `non_exhaustive_omitted_patterns` lint

            ret.extend(usefulness);
        }
    };

    if ret.is_useful() {
        v.head().set_reachable();
    }

    ret
}
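
// --- Illustrative sketch (editor's addition, not part of the original source). ---
// The recursion above, specialized to a single boolean column with a toy
// pattern type, so the split/specialize shape is visible without the real
// type machinery. `BoolPat` and `is_useful_bool` are hypothetical names.
#[derive(Clone, Copy, PartialEq)]
enum BoolPat {
    Lit(bool),
    Wild,
}

fn is_useful_bool(matrix: &[BoolPat], v: BoolPat) -> bool {
    match v {
        // A literal is useful unless an earlier row already covers it.
        BoolPat::Lit(b) => !matrix.iter().any(|&p| p == BoolPat::Wild || p == BoolPat::Lit(b)),
        // A wildcard splits into the two constructors; it is useful if
        // either of them is still uncovered by the matrix.
        BoolPat::Wild => {
            is_useful_bool(matrix, BoolPat::Lit(true)) || is_useful_bool(matrix, BoolPat::Lit(false))
        }
    }
}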

/// The arm of a match expression.
#[derive(Clone, Copy)]
pub(crate) struct MatchArm<'p> {
    pub(crate) pat: &'p DeconstructedPat<'p>,
    pub(crate) has_guard: bool,
}

/// Indicates whether or not a given arm is reachable.
#[derive(Clone, Debug)]
pub(crate) enum Reachability {
    /// The arm is reachable. This additionally carries a set of or-pattern branches that have been
    /// found to be unreachable despite the overall arm being reachable. Used only in the presence
    /// of or-patterns, otherwise it stays empty.
    // FIXME: store unreachable subpattern IDs
    Reachable,
    /// The arm is unreachable.
    Unreachable,
}

/// The output of checking a match for exhaustiveness and arm reachability.
pub(crate) struct UsefulnessReport<'p> {
    /// For each arm of the input, whether that arm is reachable after the arms above it.
    pub(crate) _arm_usefulness: Vec<(MatchArm<'p>, Reachability)>,
    /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
    /// exhaustiveness.
    pub(crate) non_exhaustiveness_witnesses: Vec<DeconstructedPat<'p>>,
}

/// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which
/// of its arms are reachable.
///
/// Note: the input patterns must have been lowered through
/// `check_match::MatchVisitor::lower_pattern`.
pub(crate) fn compute_match_usefulness<'p>(
    cx: &MatchCheckCtx<'_, 'p>,
    arms: &[MatchArm<'p>],
    scrut_ty: &Ty,
) -> UsefulnessReport<'p> {
    let mut matrix = Matrix::empty();
    let arm_usefulness = arms
        .iter()
        .copied()
        .map(|arm| {
            let v = PatStack::from_pattern(arm.pat);
            is_useful(cx, &matrix, &v, RealArm, arm.has_guard, true);
            if !arm.has_guard {
                matrix.push(v);
            }
            let reachability = if arm.pat.is_reachable() {
                Reachability::Reachable
            } else {
                Reachability::Unreachable
            };
            (arm, reachability)
        })
        .collect();

    let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty.clone()));
    let v = PatStack::from_pattern(wild_pattern);
    let usefulness = is_useful(cx, &matrix, &v, FakeExtraWildcard, false, true);
    let non_exhaustiveness_witnesses = match usefulness {
        WithWitnesses(pats) => pats.into_iter().map(Witness::single_pattern).collect(),
        NoWitnesses { .. } => panic!("bug"),
    };
    UsefulnessReport { _arm_usefulness: arm_usefulness, non_exhaustiveness_witnesses }
}
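
// --- Illustrative sketch (editor's addition, not part of the original source). ---
// The exhaustiveness trick used by `compute_match_usefulness` above, phrased
// with the toy `is_useful_bool` sketched earlier: a match is exhaustive exactly
// when one extra wildcard row is *not* useful against the existing arms. For
// example, `&[BoolPat::Lit(true)]` is reported non-exhaustive because the
// wildcard still matches `false`.
fn is_exhaustive_bool(arms: &[BoolPat]) -> bool {
    !is_useful_bool(arms, BoolPat::Wild)
}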

pub(crate) mod helper {
    // Copy-pasted from rust/compiler/rustc_data_structures/src/captures.rs
    /// "Signaling" trait used in impl trait to tag lifetimes that you may
    /// need to capture but don't really need for other reasons.
    /// Basically a workaround; see [this comment] for details.
    ///
    /// [this comment]: https://github.com/rust-lang/rust/issues/34511#issuecomment-373423999
    // FIXME(eddyb) false positive, the lifetime parameter is "phantom" but needed.
    #[allow(unused_lifetimes)]
    pub(crate) trait Captures<'a> {}

    impl<'a, T: ?Sized> Captures<'a> for T {}
}
@ -605,8 +605,11 @@ fn render_const_scalar(
            write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast()))
        }
        hir_def::AdtId::EnumId(e) => {
            let Ok(target_data_layout) = f.db.target_data_layout(trait_env.krate) else {
                return f.write_str("<target-layout-not-available>");
            };
            let Some((var_id, var_layout)) =
                detect_variant_from_bytes(&layout, f.db, trait_env, b, e)
                detect_variant_from_bytes(&layout, f.db, &target_data_layout, b, e)
            else {
                return f.write_str("<failed-to-detect-variant>");
            };

@ -75,7 +75,7 @@ pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};

/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
    let _p = profile::span("infer_query");
    let _p = tracing::span!(tracing::Level::INFO, "infer_query").entered();
    let resolver = def.resolver(db.upcast());
    let body = db.body(def);
    let mut ctx = InferenceContext::new(db, def, &body, resolver);

@ -509,7 +509,8 @@ impl<'a> InferenceTable<'a> {
    }

    pub(crate) fn resolve_obligations_as_possible(&mut self) {
        let _span = profile::span("resolve_obligations_as_possible");
        let _span =
            tracing::span!(tracing::Level::INFO, "resolve_obligations_as_possible").entered();
        let mut changed = true;
        let mut obligations = mem::take(&mut self.resolve_obligations_buffer);
        while mem::take(&mut changed) {

@ -84,8 +84,7 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
                Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
                Some(1..) => item_ty.super_visit_with(self, outer_binder),
            },
            TyKind::Ref(..) | _ => CONTINUE_OPAQUELY_INHABITED,
            _ => CONTINUE_OPAQUELY_INHABITED,
        };
        self.recursive_ty.remove(ty);
        self.max_depth += 1;

@ -198,7 +198,7 @@ pub fn layout_of_ty_query(
    trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
    let krate = trait_env.krate;
    let Some(target) = db.target_data_layout(krate) else {
    let Ok(target) = db.target_data_layout(krate) else {
        return Err(LayoutError::TargetLayoutNotAvailable);
    };
    let cx = LayoutCx { target: &target };

@ -32,7 +32,7 @@ pub fn layout_of_adt_query(
    trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
    let krate = trait_env.krate;
    let Some(target) = db.target_data_layout(krate) else {
    let Ok(target) = db.target_data_layout(krate) else {
        return Err(LayoutError::TargetLayoutNotAvailable);
    };
    let cx = LayoutCx { target: &target };

@ -2,6 +2,7 @@

use base_db::CrateId;
use hir_def::layout::TargetDataLayout;
use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors};
use triomphe::Arc;

use crate::db::HirDatabase;
@ -9,15 +10,40 @@ use crate::db::HirDatabase;
pub fn target_data_layout_query(
    db: &dyn HirDatabase,
    krate: CrateId,
) -> Option<Arc<TargetDataLayout>> {
) -> Result<Arc<TargetDataLayout>, Arc<str>> {
    let crate_graph = db.crate_graph();
    let target_layout = crate_graph[krate].target_layout.as_ref().ok()?;
    let res = TargetDataLayout::parse_from_llvm_datalayout_string(target_layout);
    if let Err(_e) = &res {
        // FIXME: Print the error here once it implements debug/display
        // also logging here is somewhat wrong, but unfortunately this is the earliest place we can
        // parse that doesn't impose a dependency to the rust-abi crate for project-model
        tracing::error!("Failed to parse target data layout for {krate:?}");
    }
    res.ok().map(Arc::new)
    let res = crate_graph[krate].target_layout.as_deref();
    match res {
        Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
            Ok(it) => Ok(Arc::new(it)),
            Err(e) => {
                Err(match e {
                    TargetDataLayoutErrors::InvalidAddressSpace { addr_space, cause, err } => {
                        format!(
                            r#"invalid address space `{addr_space}` for `{cause}` in "data-layout": {err}"#
                        )
                    }
                    TargetDataLayoutErrors::InvalidBits { kind, bit, cause, err } => format!(r#"invalid {kind} `{bit}` for `{cause}` in "data-layout": {err}"#),
                    TargetDataLayoutErrors::MissingAlignment { cause } => format!(r#"missing alignment for `{cause}` in "data-layout""#),
                    TargetDataLayoutErrors::InvalidAlignment { cause, err } => format!(
                        r#"invalid alignment for `{cause}` in "data-layout": `{align}` is {err_kind}"#,
                        align = err.align(),
                        err_kind = match err {
                            AlignFromBytesError::NotPowerOfTwo(_) => "not a power of two",
                            AlignFromBytesError::TooLarge(_) => "too large",
                        }
                    ),
                    TargetDataLayoutErrors::InconsistentTargetArchitecture { dl, target } => {
                        format!(r#"inconsistent target specification: "data-layout" claims architecture is {dl}-endian, while "target-endian" is `{target}`"#)
                    }
                    TargetDataLayoutErrors::InconsistentTargetPointerWidth {
                        pointer_size,
                        target,
                    } => format!(r#"inconsistent target specification: "data-layout" claims pointers are {pointer_size}-bit, while "target-pointer-width" is `{target}`"#),
                    TargetDataLayoutErrors::InvalidBitsSize { err } => err,
                }.into())
            }
        },
        Err(e) => Err(Arc::from(&**e)),
    }
}
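
// --- Illustrative sketch (editor's addition, not part of the original source). ---
// What the Option -> Result change above means for a caller of
// `target_data_layout`: the parse failure text now travels with the error
// instead of being logged and dropped. `report` is a hypothetical helper.
fn report(db: &dyn HirDatabase, krate: CrateId) -> String {
    match db.target_data_layout(krate) {
        Ok(layout) => format!("pointer size: {} bytes", layout.pointer_size.bytes_usize()),
        Err(msg) => format!("no target layout: {msg}"),
    }
}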
@ -15,6 +15,9 @@ extern crate rustc_abi;
#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_abi as rustc_abi;

// No need to use the in-tree one.
extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis;

mod builder;
mod chalk_db;
mod chalk_ext;

@ -38,10 +41,10 @@ pub mod mir;
pub mod primitive;
pub mod traits;

#[cfg(test)]
mod tests;
#[cfg(test)]
mod test_db;
#[cfg(test)]
mod tests;

use std::{
    collections::hash_map::Entry,

@ -360,7 +363,6 @@ has_interner!(CallableSig);
pub enum FnAbi {
    Aapcs,
    AapcsUnwind,
    AmdgpuKernel,
    AvrInterrupt,
    AvrNonBlockingInterrupt,
    C,

@ -419,7 +421,6 @@ impl FnAbi {
        match s {
            "aapcs-unwind" => FnAbi::AapcsUnwind,
            "aapcs" => FnAbi::Aapcs,
            "amdgpu-kernel" => FnAbi::AmdgpuKernel,
            "avr-interrupt" => FnAbi::AvrInterrupt,
            "avr-non-blocking-interrupt" => FnAbi::AvrNonBlockingInterrupt,
            "C-cmse-nonsecure-call" => FnAbi::CCmseNonsecureCall,

@ -462,7 +463,6 @@ impl FnAbi {
        match self {
            FnAbi::Aapcs => "aapcs",
            FnAbi::AapcsUnwind => "aapcs-unwind",
            FnAbi::AmdgpuKernel => "amdgpu-kernel",
            FnAbi::AvrInterrupt => "avr-interrupt",
            FnAbi::AvrNonBlockingInterrupt => "avr-non-blocking-interrupt",
            FnAbi::C => "C",

@ -143,7 +143,8 @@ pub struct TraitImpls {

impl TraitImpls {
    pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
        let _p = profile::span("trait_impls_in_crate_query").detail(|| format!("{krate:?}"));
        let _p =
            tracing::span!(tracing::Level::INFO, "trait_impls_in_crate_query", ?krate).entered();
        let mut impls = FxHashMap::default();

        Self::collect_def_map(db, &mut impls, &db.crate_def_map(krate));

@ -155,7 +156,7 @@ impl TraitImpls {
        db: &dyn HirDatabase,
        block: BlockId,
    ) -> Option<Arc<Self>> {
        let _p = profile::span("trait_impls_in_block_query");
        let _p = tracing::span!(tracing::Level::INFO, "trait_impls_in_block_query").entered();
        let mut impls = FxHashMap::default();

        Self::collect_def_map(db, &mut impls, &db.block_def_map(block));

@ -171,7 +172,8 @@ impl TraitImpls {
        db: &dyn HirDatabase,
        krate: CrateId,
    ) -> Arc<[Arc<Self>]> {
        let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
        let _p =
            tracing::span!(tracing::Level::INFO, "trait_impls_in_deps_query", ?krate).entered();
        let crate_graph = db.crate_graph();

        Arc::from_iter(

@ -272,7 +274,8 @@ pub struct InherentImpls {

impl InherentImpls {
    pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
        let _p = profile::span("inherent_impls_in_crate_query").detail(|| format!("{krate:?}"));
        let _p =
            tracing::span!(tracing::Level::INFO, "inherent_impls_in_crate_query", ?krate).entered();
        let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };

        let crate_def_map = db.crate_def_map(krate);

@ -286,7 +289,7 @@ impl InherentImpls {
        db: &dyn HirDatabase,
        block: BlockId,
    ) -> Option<Arc<Self>> {
        let _p = profile::span("inherent_impls_in_block_query");
        let _p = tracing::span!(tracing::Level::INFO, "inherent_impls_in_block_query").entered();
        let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };

        let block_def_map = db.block_def_map(block);

@ -359,7 +362,7 @@ pub(crate) fn incoherent_inherent_impl_crates(
    krate: CrateId,
    fp: TyFingerprint,
) -> SmallVec<[CrateId; 2]> {
    let _p = profile::span("inherent_impl_crates_query");
    let _p = tracing::span!(tracing::Level::INFO, "inherent_impl_crates_query").entered();
    let mut res = SmallVec::new();
    let crate_graph = db.crate_graph();
@ -21,11 +21,11 @@ use hir_def::{
};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};

mod borrowck;
mod eval;
mod lower;
mod borrowck;
mod pretty;
mod monomorphization;
mod pretty;

pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
pub use eval::{

@ -71,7 +71,7 @@ pub fn borrowck_query(
    db: &dyn HirDatabase,
    def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
    let _p = profile::span("borrowck_query");
    let _p = tracing::span!(tracing::Level::INFO, "borrowck_query").entered();
    let mut res = vec![];
    all_mir_bodies(db, def, |body| {
        res.push(BorrowckResult {

@ -444,7 +444,7 @@ fn mutability_of_locals(
            }
            if destination.projection.lookup(&body.projection_store).is_empty() {
                if ever_init_map.get(destination.local).copied().unwrap_or_default() {
                    push_mut_span(destination.local, MirSpan::Unknown, &mut result);
                    push_mut_span(destination.local, terminator.span, &mut result);
                } else {
                    ever_init_map.insert(destination.local, true);
                }

@ -17,6 +17,7 @@ use hir_def::{
use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile};
use intern::Interned;
use la_arena::ArenaMap;
use rustc_abi::TargetDataLayout;
use rustc_hash::{FxHashMap, FxHashSet};
use stdx::never;
use syntax::{SyntaxNodePtr, TextRange};

@ -51,7 +52,7 @@ macro_rules! from_bytes {
    ($ty:tt, $value:expr) => {
        ($ty::from_le_bytes(match ($value).try_into() {
            Ok(it) => it,
            Err(_) => return Err(MirEvalError::TypeError(stringify!(mismatched size in constructing $ty))),
            Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $ty).into())),
        }))
    };
}
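
// --- Illustrative sketch (editor's addition, not part of the original source). ---
// The shape a `from_bytes!(usize, value)` use site expands to after the hunk
// above, with a stand-in error type so the sketch is self-contained; in the
// real evaluator the error is `MirEvalError::InternalError`.
#[derive(Debug)]
struct StandInInternalError(Box<str>);

fn decode_usize(bytes: &[u8]) -> Result<usize, StandInInternalError> {
    Ok(usize::from_le_bytes(match bytes.try_into() {
        Ok(it) => it,
        Err(_) => return Err(StandInInternalError("mismatched size".into())),
    }))
}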
@ -145,6 +146,7 @@ enum MirOrDynIndex {
pub struct Evaluator<'a> {
    db: &'a dyn HirDatabase,
    trait_env: Arc<TraitEnvironment>,
    target_data_layout: Arc<TargetDataLayout>,
    stack: Vec<u8>,
    heap: Vec<u8>,
    code_stack: Vec<StackFrame>,

@ -316,12 +318,12 @@ impl Address {
pub enum MirEvalError {
    ConstEvalError(String, Box<ConstEvalError>),
    LayoutError(LayoutError, Ty),
    /// Means that code had type errors (or mismatched args) and we shouldn't generate mir in first place.
    TypeError(&'static str),
    TargetDataLayoutNotAvailable(Arc<str>),
    /// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected
    /// then use this type of error.
    UndefinedBehavior(String),
    Panic(String),
    // FIXME: This should be folded into ConstEvalError?
    MirLowerError(FunctionId, MirLowerError),
    MirLowerErrorForClosure(ClosureId, MirLowerError),
    TypeIsUnsized(Ty, &'static str),

@ -330,11 +332,12 @@ pub enum MirEvalError {
    InFunction(Box<MirEvalError>, Vec<(Either<FunctionId, ClosureId>, MirSpan, DefWithBodyId)>),
    ExecutionLimitExceeded,
    StackOverflow,
    TargetDataLayoutNotAvailable,
    /// FIXME: Fold this into InternalError
    InvalidVTableId(usize),
    /// ?
    CoerceUnsizedError(Ty),
    LangItemNotFound(LangItem),
    /// These should not occur, usually indicates a bug in mir lowering.
    BrokenLayout(Box<Layout>),
    InternalError(Box<str>),
}

impl MirEvalError {

@ -359,8 +362,8 @@ impl MirEvalError {
                        func
                    )?;
                }
                Either::Right(clos) => {
                Either::Right(closure) => {
                    writeln!(f, "In {:?}", clos)?;
                    writeln!(f, "In {:?}", closure)?;
                }
            }
            let source_map = db.body_with_source_map(*def).1;

@ -406,8 +409,8 @@ impl MirEvalError {
                    span_formatter,
                )?;
            }
            MirEvalError::TypeError(_)
            | MirEvalError::UndefinedBehavior(_)
            MirEvalError::UndefinedBehavior(_)
            | MirEvalError::TargetDataLayoutNotAvailable(_)
            | MirEvalError::Panic(_)
            | MirEvalError::MirLowerErrorForClosure(_, _)
            | MirEvalError::TypeIsUnsized(_, _)

@ -415,10 +418,8 @@ impl MirEvalError {
            | MirEvalError::InvalidConst(_)
            | MirEvalError::ExecutionLimitExceeded
            | MirEvalError::StackOverflow
            | MirEvalError::TargetDataLayoutNotAvailable
            | MirEvalError::CoerceUnsizedError(_)
            | MirEvalError::LangItemNotFound(_)
            | MirEvalError::InternalError(_)
            | MirEvalError::BrokenLayout(_)
            | MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?,
        }
        Ok(())

@ -431,16 +432,16 @@ impl std::fmt::Debug for MirEvalError {
            Self::ConstEvalError(arg0, arg1) => {
                f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish()
            }
            Self::LangItemNotFound(arg0) => f.debug_tuple("LangItemNotFound").field(arg0).finish(),
            Self::LayoutError(arg0, arg1) => {
                f.debug_tuple("LayoutError").field(arg0).field(arg1).finish()
            }
            Self::TypeError(arg0) => f.debug_tuple("TypeError").field(arg0).finish(),
            Self::UndefinedBehavior(arg0) => {
                f.debug_tuple("UndefinedBehavior").field(arg0).finish()
            }
            Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"),
            Self::TargetDataLayoutNotAvailable => write!(f, "TargetDataLayoutNotAvailable"),
            Self::TargetDataLayoutNotAvailable(arg0) => {
                f.debug_tuple("TargetDataLayoutNotAvailable").field(arg0).finish()
            }
            Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."),
            Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),
            Self::StackOverflow => write!(f, "stack overflow"),

@ -453,7 +454,7 @@ impl std::fmt::Debug for MirEvalError {
            Self::CoerceUnsizedError(arg0) => {
                f.debug_tuple("CoerceUnsizedError").field(arg0).finish()
            }
            Self::BrokenLayout(arg0) => f.debug_tuple("BrokenLayout").field(arg0).finish(),
            Self::InternalError(arg0) => f.debug_tuple("InternalError").field(arg0).finish(),
            Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(),
            Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
            Self::InvalidConst(arg0) => {
@ -530,7 +531,11 @@ pub fn interpret_mir(
    trait_env: Option<Arc<TraitEnvironment>>,
) -> (Result<Const>, MirOutput) {
    let ty = body.locals[return_slot()].ty.clone();
    let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env);
    let mut evaluator =
        match Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env) {
            Ok(it) => it,
            Err(e) => return (Err(e), MirOutput { stdout: vec![], stderr: vec![] }),
        };
    let it: Result<Const> = (|| {
        if evaluator.ptr_size() != std::mem::size_of::<usize>() {
            not_supported!("targets with different pointer size from host");

@ -566,9 +571,15 @@ impl Evaluator<'_> {
        owner: DefWithBodyId,
        assert_placeholder_ty_is_unused: bool,
        trait_env: Option<Arc<TraitEnvironment>>,
    ) -> Evaluator<'_> {
    ) -> Result<Evaluator<'_>> {
        let crate_id = owner.module(db.upcast()).krate();
        Evaluator {
        let target_data_layout = match db.target_data_layout(crate_id) {
            Ok(target_data_layout) => target_data_layout,
            Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
        };
        let cached_ptr_size = target_data_layout.pointer_size.bytes_usize();
        Ok(Evaluator {
            target_data_layout,
            stack: vec![0],
            heap: vec![0],
            code_stack: vec![],

@ -590,10 +601,7 @@ impl Evaluator<'_> {
            not_special_fn_cache: RefCell::new(Default::default()),
            mir_or_dyn_index_cache: RefCell::new(Default::default()),
            unused_locals_store: RefCell::new(Default::default()),
            cached_ptr_size: match db.target_data_layout(crate_id) {
                Some(it) => it.pointer_size.bytes_usize(),
                None => 8,
            },
            cached_ptr_size,
            cached_fn_trait_func: db
                .lang_item(crate_id, LangItem::Fn)
                .and_then(|x| x.as_trait())

@ -606,7 +614,7 @@ impl Evaluator<'_> {
                .lang_item(crate_id, LangItem::FnOnce)
                .and_then(|x| x.as_trait())
                .and_then(|x| db.trait_data(x).method_by_name(&name![call_once])),
        }
        })
    }

    fn place_addr(&self, p: &Place, locals: &Locals) -> Result<Address> {

@ -754,8 +762,8 @@ impl Evaluator<'_> {
                RustcEnumVariantIdx(it.lookup(self.db.upcast()).index as usize)
            }
            _ => {
                return Err(MirEvalError::TypeError(
                return Err(MirEvalError::InternalError(
                    "Multivariant layout only happens for enums",
                    "mismatched layout".into(),
                ))
            }
        }]

@ -993,12 +1001,12 @@ impl Evaluator<'_> {
                IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?,
            }
            if remain_args == 0 {
                return Err(MirEvalError::TypeError("more arguments provided"));
                return Err(MirEvalError::InternalError("too many arguments".into()));
            }
            remain_args -= 1;
        }
        if remain_args > 0 {
            return Err(MirEvalError::TypeError("not enough arguments provided"));
            return Err(MirEvalError::InternalError("too few arguments".into()));
        }
        Ok(())
    }

@ -1071,8 +1079,8 @@ impl Evaluator<'_> {
            match metadata {
                Some(m) => m,
                None => {
                    return Err(MirEvalError::TypeError(
                    return Err(MirEvalError::InternalError(
                        "type without metadata is used for Rvalue::Len",
                        "type without metadata is used for Rvalue::Len".into(),
                    ));
                }
            }

@ -1312,7 +1320,7 @@ impl Evaluator<'_> {
            }
            AggregateKind::Tuple(ty) => {
                let layout = self.layout(ty)?;
                Owned(self.make_by_layout(
                Owned(self.construct_with_layout(
                    layout.size.bytes_usize(),
                    &layout,
                    None,

@ -1334,7 +1342,7 @@ impl Evaluator<'_> {
            AggregateKind::Adt(it, subst) => {
                let (size, variant_layout, tag) =
                    self.layout_of_variant(*it, subst.clone(), locals)?;
                Owned(self.make_by_layout(
                Owned(self.construct_with_layout(
                    size,
                    &variant_layout,
                    tag,

@ -1343,7 +1351,7 @@ impl Evaluator<'_> {
            }
            AggregateKind::Closure(ty) => {
                let layout = self.layout(ty)?;
                Owned(self.make_by_layout(
                Owned(self.construct_with_layout(
                    layout.size.bytes_usize(),
                    &layout,
                    None,

@ -1415,10 +1423,7 @@ impl Evaluator<'_> {
                Ok(r)
            }
            Variants::Multiple { tag, tag_encoding, variants, .. } => {
                let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else {
                    not_supported!("missing target data layout");
                };
                let size = tag.size(&*target_data_layout).bytes_usize();
                let size = tag.size(&*self.target_data_layout).bytes_usize();
                let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
                match tag_encoding {
                    TagEncoding::Direct => {

@ -1458,9 +1463,8 @@ impl Evaluator<'_> {
        if let TyKind::Adt(id, subst) = kind {
            if let AdtId::StructId(struct_id) = id.0 {
                let field_types = self.db.field_types(struct_id.into());
                let mut field_types = field_types.iter();
                if let Some(ty) =
                    field_types.next().map(|it| it.1.clone().substitute(Interner, subst))
                    field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst))
                {
                    return self.coerce_unsized_look_through_fields(&ty, goal);
                }

@ -1578,10 +1582,6 @@ impl Evaluator<'_> {
        Ok(match &layout.variants {
            Variants::Single { .. } => (layout.size.bytes_usize(), layout, None),
            Variants::Multiple { variants, tag, tag_encoding, .. } => {
                let cx = self
                    .db
                    .target_data_layout(self.crate_id)
                    .ok_or(MirEvalError::TargetDataLayoutNotAvailable)?;
                let enum_variant_id = match it {
                    VariantId::EnumVariantId(it) => it,
                    _ => not_supported!("multi variant layout for non-enums"),

@ -1612,7 +1612,7 @@ impl Evaluator<'_> {
                    if have_tag {
                        Some((
                            layout.fields.offset(0).bytes_usize(),
                            tag.size(&*cx).bytes_usize(),
                            tag.size(&*self.target_data_layout).bytes_usize(),
                            discriminant,
                        ))
                    } else {

@ -1623,7 +1623,7 @@ impl Evaluator<'_> {
        })
    }

    fn make_by_layout(
    fn construct_with_layout(
        &mut self,
        size: usize, // Not necessarily equal to variant_layout.size
        variant_layout: &Layout,

@ -1634,7 +1634,14 @@ impl Evaluator<'_> {
        if let Some((offset, size, value)) = tag {
            match result.get_mut(offset..offset + size) {
                Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]),
                None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))),
                None => {
                    return Err(MirEvalError::InternalError(
                        format!(
                            "encoded tag ({offset}, {size}, {value}) is out of bounds 0..{size}"
                        )
                        .into(),
                    ))
                }
            }
        }
        for (i, op) in values.enumerate() {

@ -1642,7 +1649,11 @@ impl Evaluator<'_> {
            let op = op.get(self)?;
            match result.get_mut(offset..offset + op.len()) {
                Some(it) => it.copy_from_slice(op),
                None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))),
                None => {
                    return Err(MirEvalError::InternalError(
                        format!("field offset ({offset}) is out of bounds 0..{size}").into(),
                    ))
                }
            }
        }
        Ok(result)

@ -1695,28 +1706,29 @@ impl Evaluator<'_> {
            }
            ConstScalar::Unknown => not_supported!("evaluating unknown const"),
        };
        let mut v: Cow<'_, [u8]> = Cow::Borrowed(v);
        let patch_map = memory_map.transform_addresses(|b, align| {
            let addr = self.heap_allocate(b.len(), align)?;
            self.write_memory(addr, b)?;
            Ok(addr.to_usize())
        })?;
        let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1));
        if size != v.len() {
        let v: Cow<'_, [u8]> = if size != v.len() {
            // Handle self enum
            if size == 16 && v.len() < 16 {
                v = Cow::Owned(pad16(&v, false).to_vec());
                Cow::Owned(pad16(v, false).to_vec())
            } else if size < 16 && v.len() == 16 {
                v = Cow::Owned(v[0..size].to_vec());
                Cow::Borrowed(&v[0..size])
            } else {
                return Err(MirEvalError::InvalidConst(konst.clone()));
            }
        }
        } else {
            Cow::Borrowed(v)
        };
        let addr = self.heap_allocate(size, align)?;
        self.write_memory(addr, &v)?;
        self.patch_addresses(
            &patch_map,
            |bytes| match &memory_map {
            |bytes| match memory_map {
                MemoryMap::Empty | MemoryMap::Simple(_) => {
                    Err(MirEvalError::InvalidVTableId(from_bytes!(usize, bytes)))
                }
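
// --- Illustrative sketch (editor's addition, not part of the original source). ---
// The `Cow` pattern the const-allocation hunk above moves to: bind the
// adjusted buffer once as an expression instead of mutating a `mut` binding,
// so the already-right-sized and shrink cases stay borrowed. `adjust` is a
// hypothetical stand-in for the size fixup, not the evaluator's exact logic.
use std::borrow::Cow;

fn adjust(v: &[u8], size: usize) -> Cow<'_, [u8]> {
    if size == v.len() {
        Cow::Borrowed(v)
    } else if size < v.len() {
        Cow::Borrowed(&v[..size]) // shrink without allocating
    } else {
        let mut owned = v.to_vec();
        owned.resize(size, 0); // grow, zero-filled
        Cow::Owned(owned)
    }
}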
@ -2000,7 +2012,7 @@ impl Evaluator<'_> {
                    if let Some((v, l)) = detect_variant_from_bytes(
                        &layout,
                        this.db,
                        this.trait_env.clone(),
                        &this.target_data_layout,
                        bytes,
                        e,
                    ) {

@ -2079,7 +2091,7 @@ impl Evaluator<'_> {
                if let Some((ev, layout)) = detect_variant_from_bytes(
                    &layout,
                    self.db,
                    self.trait_env.clone(),
                    &self.target_data_layout,
                    self.read_memory(addr, layout.size.bytes_usize())?,
                    e,
                ) {

@ -2153,14 +2165,14 @@ impl Evaluator<'_> {
    ) -> Result<Option<StackFrame>> {
        let id = from_bytes!(usize, bytes.get(self)?);
        let next_ty = self.vtable_map.ty(id)?.clone();
        match &next_ty.kind(Interner) {
        match next_ty.kind(Interner) {
            TyKind::FnDef(def, generic_args) => {
                self.exec_fn_def(*def, generic_args, destination, args, locals, target_bb, span)
            }
            TyKind::Closure(id, subst) => {
                self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)
            }
            _ => Err(MirEvalError::TypeError("function pointer to non function")),
            _ => Err(MirEvalError::InternalError("function pointer to non function".into())),
        }
    }

@ -2241,7 +2253,7 @@ impl Evaluator<'_> {
            CallableDefId::StructId(id) => {
                let (size, variant_layout, tag) =
                    self.layout_of_variant(id.into(), generic_args, locals)?;
                let result = self.make_by_layout(
                let result = self.construct_with_layout(
                    size,
                    &variant_layout,
                    tag,

@ -2253,7 +2265,7 @@ impl Evaluator<'_> {
            CallableDefId::EnumVariantId(id) => {
                let (size, variant_layout, tag) =
                    self.layout_of_variant(id.into(), generic_args, locals)?;
                let result = self.make_by_layout(
                let result = self.construct_with_layout(
                    size,
                    &variant_layout,
                    tag,

@ -2407,7 +2419,9 @@ impl Evaluator<'_> {
        target_bb: Option<BasicBlockId>,
        span: MirSpan,
    ) -> Result<Option<StackFrame>> {
        let func = args.first().ok_or(MirEvalError::TypeError("fn trait with no arg"))?;
        let func = args
            .first()
            .ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?;
        let mut func_ty = func.ty.clone();
        let mut func_data = func.interval;
        while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) {

@ -2450,7 +2464,7 @@ impl Evaluator<'_> {
        )
        .intern(Interner);
        let layout = self.layout(&ty)?;
        let result = self.make_by_layout(
        let result = self.construct_with_layout(
            layout.size.bytes_usize(),
            &layout,
            None,

@ -2634,7 +2648,7 @@ pub fn render_const_using_debug_impl(
    owner: ConstId,
    c: &Const,
) -> Result<String> {
    let mut evaluator = Evaluator::new(db, owner.into(), false, None);
    let mut evaluator = Evaluator::new(db, owner.into(), false, None)?;
    let locals = &Locals {
        ptr: ArenaMap::new(),
        body: db
|
@ -2699,12 +2713,7 @@ pub fn render_const_using_debug_impl(
|
||||||
|
|
||||||
pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] {
|
pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] {
|
||||||
let is_negative = is_signed && it.last().unwrap_or(&0) > &127;
|
let is_negative = is_signed && it.last().unwrap_or(&0) > &127;
|
||||||
let fill_with = if is_negative { 255 } else { 0 };
|
let mut res = [if is_negative { 255 } else { 0 }; 16];
|
||||||
it.iter()
|
res[..it.len()].copy_from_slice(it);
|
||||||
.copied()
|
res
|
||||||
.chain(iter::repeat(fill_with))
|
|
||||||
.take(16)
|
|
||||||
.collect::<Vec<u8>>()
|
|
||||||
.try_into()
|
|
||||||
.expect("iterator take is not working")
|
|
||||||
}
|
}
|
||||||
|
|
|
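
// --- Illustrative sketch (editor's addition, not part of the original source). ---
// The behavior the rewritten `pad16` above preserves: little-endian widening
// to 16 bytes with sign extension. Assumes `pad16` is in scope.
#[cfg(test)]
mod pad16_sketch {
    use super::pad16;

    #[test]
    fn sign_extends_negative_little_endian_values() {
        // -1i8 is the single byte 0xFF; signed padding fills the rest with 0xFF.
        assert_eq!(pad16(&[0xFF], true), [0xFF; 16]);
        // Unsigned padding fills with zeros instead.
        let mut expected = [0u8; 16];
        expected[0] = 0xFF;
        assert_eq!(pad16(&[0xFF], false), expected);
    }
}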
@ -18,7 +18,7 @@ macro_rules! from_bytes {
    ($ty:tt, $value:expr) => {
        ($ty::from_le_bytes(match ($value).try_into() {
            Ok(it) => it,
            Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
            Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())),
        }))
    };
}

@ -249,7 +249,9 @@ impl Evaluator<'_> {
            match alloc_fn {
                "rustc_allocator_zeroed" | "rustc_allocator" => {
                    let [size, align] = args else {
                        return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
                        return Err(MirEvalError::InternalError(
                            "rustc_allocator args are not provided".into(),
                        ));
                    };
                    let size = from_bytes!(usize, size.get(self)?);
                    let align = from_bytes!(usize, align.get(self)?);

@ -259,7 +261,9 @@ impl Evaluator<'_> {
                "rustc_deallocator" => { /* no-op for now */ }
                "rustc_reallocator" => {
                    let [ptr, old_size, align, new_size] = args else {
                        return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
                        return Err(MirEvalError::InternalError(
                            "rustc_allocator args are not provided".into(),
                        ));
                    };
                    let old_size = from_bytes!(usize, old_size.get(self)?);
                    let new_size = from_bytes!(usize, new_size.get(self)?);

@ -339,22 +343,22 @@ impl Evaluator<'_> {
                Err(MirEvalError::Panic(message))
            }
            SliceLen => {
                let arg = args
                    .next()
                    .ok_or(MirEvalError::TypeError("argument of <[T]>::len() is not provided"))?;
                let arg = args.next().ok_or(MirEvalError::InternalError(
                    "argument of <[T]>::len() is not provided".into(),
                ))?;
                let ptr_size = arg.len() / 2;
                Ok(arg[ptr_size..].into())
            }
            DropInPlace => {
                let ty =
                    generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)).ok_or(
                        MirEvalError::TypeError(
                        MirEvalError::InternalError(
                            "generic argument of drop_in_place is not provided",
                            "generic argument of drop_in_place is not provided".into(),
                        ),
                    )?;
                let arg = args
                    .next()
                    .ok_or(MirEvalError::TypeError("argument of drop_in_place is not provided"))?;
                let arg = args.next().ok_or(MirEvalError::InternalError(
                    "argument of drop_in_place is not provided".into(),
                ))?;
                self.run_drop_glue_deep(
                    ty.clone(),
                    locals,

@ -380,7 +384,9 @@ impl Evaluator<'_> {
                318 => {
                    // SYS_getrandom
                    let [buf, len, _flags] = args else {
                        return Err(MirEvalError::TypeError("SYS_getrandom args are not provided"));
                        return Err(MirEvalError::InternalError(
                            "SYS_getrandom args are not provided".into(),
                        ));
                    };
                    let addr = Address::from_bytes(buf.get(self)?)?;
                    let size = from_bytes!(usize, len.get(self)?);

@ -408,7 +414,7 @@ impl Evaluator<'_> {
            match as_str {
                "memcmp" => {
                    let [ptr1, ptr2, size] = args else {
                        return Err(MirEvalError::TypeError("memcmp args are not provided"));
                        return Err(MirEvalError::InternalError("memcmp args are not provided".into()));
                    };
                    let addr1 = Address::from_bytes(ptr1.get(self)?)?;
                    let addr2 = Address::from_bytes(ptr2.get(self)?)?;

@ -424,7 +430,9 @@ impl Evaluator<'_> {
                }
                "write" => {
                    let [fd, ptr, len] = args else {
                        return Err(MirEvalError::TypeError("libc::write args are not provided"));
                        return Err(MirEvalError::InternalError(
                            "libc::write args are not provided".into(),
                        ));
                    };
                    let fd = u128::from_le_bytes(pad16(fd.get(self)?, false));
                    let interval = Interval {

@ -446,14 +454,16 @@ impl Evaluator<'_> {
                "pthread_key_create" => {
                    let key = self.thread_local_storage.create_key();
                    let Some(arg0) = args.first() else {
                        return Err(MirEvalError::TypeError("pthread_key_create arg0 is not provided"));
                        return Err(MirEvalError::InternalError(
                            "pthread_key_create arg0 is not provided".into(),
                        ));
                    };
                    let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
                    let key_ty = if let Some((ty, ..)) = arg0.ty.as_reference_or_ptr() {
                        ty
                    } else {
                        return Err(MirEvalError::TypeError(
                        return Err(MirEvalError::InternalError(
                            "pthread_key_create arg0 is not a pointer",
                            "pthread_key_create arg0 is not a pointer".into(),
                        ));
                    };
                    let arg0_interval = Interval::new(

@ -467,8 +477,8 @@ impl Evaluator<'_> {
                }
                "pthread_getspecific" => {
                    let Some(arg0) = args.first() else {
                        return Err(MirEvalError::TypeError(
                        return Err(MirEvalError::InternalError(
                            "pthread_getspecific arg0 is not provided",
                            "pthread_getspecific arg0 is not provided".into(),
                        ));
                    };
                    let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);

@ -478,14 +488,14 @@ impl Evaluator<'_> {
                }
                "pthread_setspecific" => {
                    let Some(arg0) = args.first() else {
                        return Err(MirEvalError::TypeError(
                        return Err(MirEvalError::InternalError(
                            "pthread_setspecific arg0 is not provided",
                            "pthread_setspecific arg0 is not provided".into(),
                        ));
                    };
                    let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
                    let Some(arg1) = args.get(1) else {
                        return Err(MirEvalError::TypeError(
                        return Err(MirEvalError::InternalError(
                            "pthread_setspecific arg1 is not provided",
                            "pthread_setspecific arg1 is not provided".into(),
                        ));
                    };
                    let value = from_bytes!(u128, pad16(arg1.get(self)?, false));

@ -502,14 +512,16 @@ impl Evaluator<'_> {
                }
                "syscall" => {
                    let Some((id, rest)) = args.split_first() else {
                        return Err(MirEvalError::TypeError("syscall arg1 is not provided"));
                        return Err(MirEvalError::InternalError("syscall arg1 is not provided".into()));
                    };
                    let id = from_bytes!(i64, id.get(self)?);
                    self.exec_syscall(id, rest, destination, locals, span)
                }
                "sched_getaffinity" => {
                    let [_pid, _set_size, set] = args else {
                        return Err(MirEvalError::TypeError("libc::write args are not provided"));
                        return Err(MirEvalError::InternalError(
|
||||||
|
"libc::write args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let set = Address::from_bytes(set.get(self)?)?;
|
let set = Address::from_bytes(set.get(self)?)?;
|
||||||
// Only enable core 0 (we are single threaded anyway), which is bitset 0x0000001
|
// Only enable core 0 (we are single threaded anyway), which is bitset 0x0000001
|
||||||
|
@ -520,7 +532,9 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"getenv" => {
|
"getenv" => {
|
||||||
let [name] = args else {
|
let [name] = args else {
|
||||||
return Err(MirEvalError::TypeError("libc::write args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"libc::write args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let mut name_buf = vec![];
|
let mut name_buf = vec![];
|
||||||
let name = {
|
let name = {
|
||||||
|
@ -586,8 +600,8 @@ impl Evaluator<'_> {
|
||||||
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
|
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
|
||||||
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
|
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"f64 intrinsic signature doesn't match fn (f64) -> f64",
|
"f64 intrinsic signature doesn't match fn (f64) -> f64".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let arg = from_bytes!(f64, arg.get(self)?);
|
let arg = from_bytes!(f64, arg.get(self)?);
|
||||||
|
@ -614,8 +628,8 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"pow" | "minnum" | "maxnum" | "copysign" => {
|
"pow" | "minnum" | "maxnum" | "copysign" => {
|
||||||
let [arg1, arg2] = args else {
|
let [arg1, arg2] = args else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"f64 intrinsic signature doesn't match fn (f64, f64) -> f64",
|
"f64 intrinsic signature doesn't match fn (f64, f64) -> f64".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let arg1 = from_bytes!(f64, arg1.get(self)?);
|
let arg1 = from_bytes!(f64, arg1.get(self)?);
|
||||||
|
@ -630,8 +644,8 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"powi" => {
|
"powi" => {
|
||||||
let [arg1, arg2] = args else {
|
let [arg1, arg2] = args else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"powif64 signature doesn't match fn (f64, i32) -> f64",
|
"powif64 signature doesn't match fn (f64, i32) -> f64".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let arg1 = from_bytes!(f64, arg1.get(self)?);
|
let arg1 = from_bytes!(f64, arg1.get(self)?);
|
||||||
|
@ -640,8 +654,8 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"fma" => {
|
"fma" => {
|
||||||
let [arg1, arg2, arg3] = args else {
|
let [arg1, arg2, arg3] = args else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"fmaf64 signature doesn't match fn (f64, f64, f64) -> f64",
|
"fmaf64 signature doesn't match fn (f64, f64, f64) -> f64".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let arg1 = from_bytes!(f64, arg1.get(self)?);
|
let arg1 = from_bytes!(f64, arg1.get(self)?);
|
||||||
|
@ -658,8 +672,8 @@ impl Evaluator<'_> {
|
||||||
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
|
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
|
||||||
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
|
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"f32 intrinsic signature doesn't match fn (f32) -> f32",
|
"f32 intrinsic signature doesn't match fn (f32) -> f32".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let arg = from_bytes!(f32, arg.get(self)?);
|
let arg = from_bytes!(f32, arg.get(self)?);
|
||||||
|
@ -686,8 +700,8 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"pow" | "minnum" | "maxnum" | "copysign" => {
|
"pow" | "minnum" | "maxnum" | "copysign" => {
|
||||||
let [arg1, arg2] = args else {
|
let [arg1, arg2] = args else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"f32 intrinsic signature doesn't match fn (f32, f32) -> f32",
|
"f32 intrinsic signature doesn't match fn (f32, f32) -> f32".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let arg1 = from_bytes!(f32, arg1.get(self)?);
|
let arg1 = from_bytes!(f32, arg1.get(self)?);
|
||||||
|
@ -702,8 +716,8 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"powi" => {
|
"powi" => {
|
||||||
let [arg1, arg2] = args else {
|
let [arg1, arg2] = args else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"powif32 signature doesn't match fn (f32, i32) -> f32",
|
"powif32 signature doesn't match fn (f32, i32) -> f32".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let arg1 = from_bytes!(f32, arg1.get(self)?);
|
let arg1 = from_bytes!(f32, arg1.get(self)?);
|
||||||
|
@ -712,8 +726,8 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"fma" => {
|
"fma" => {
|
||||||
let [arg1, arg2, arg3] = args else {
|
let [arg1, arg2, arg3] = args else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"fmaf32 signature doesn't match fn (f32, f32, f32) -> f32",
|
"fmaf32 signature doesn't match fn (f32, f32, f32) -> f32".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let arg1 = from_bytes!(f32, arg1.get(self)?);
|
let arg1 = from_bytes!(f32, arg1.get(self)?);
|
||||||
|
@ -730,7 +744,9 @@ impl Evaluator<'_> {
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"size_of generic arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let size = self.size_of_sized(ty, locals, "size_of arg")?;
|
let size = self.size_of_sized(ty, locals, "size_of arg")?;
|
||||||
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
|
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
|
||||||
|
@ -739,7 +755,9 @@ impl Evaluator<'_> {
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"align_of generic arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let align = self.layout(ty)?.align.abi.bytes();
|
let align = self.layout(ty)?.align.abi.bytes();
|
||||||
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
|
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
|
||||||
|
@ -748,10 +766,14 @@ impl Evaluator<'_> {
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError("size_of_val generic arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"size_of_val generic arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError("size_of_val args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"size_of_val args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
if let Some((size, _)) = self.size_align_of(ty, locals)? {
|
if let Some((size, _)) = self.size_align_of(ty, locals)? {
|
||||||
destination.write_from_bytes(self, &size.to_le_bytes())
|
destination.write_from_bytes(self, &size.to_le_bytes())
|
||||||
|
@ -765,12 +787,14 @@ impl Evaluator<'_> {
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"min_align_of_val generic arg is not provided",
|
"min_align_of_val generic arg is not provided".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError("min_align_of_val args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"min_align_of_val args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
if let Some((_, align)) = self.size_align_of(ty, locals)? {
|
if let Some((_, align)) = self.size_align_of(ty, locals)? {
|
||||||
destination.write_from_bytes(self, &align.to_le_bytes())
|
destination.write_from_bytes(self, &align.to_le_bytes())
|
||||||
|
@ -784,7 +808,9 @@ impl Evaluator<'_> {
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError("type_name generic arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"type_name generic arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let ty_name = match ty.display_source_code(
|
let ty_name = match ty.display_source_code(
|
||||||
self.db,
|
self.db,
|
||||||
|
@ -808,7 +834,9 @@ impl Evaluator<'_> {
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"size_of generic arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let result = !ty.clone().is_copy(self.db, locals.body.owner);
|
let result = !ty.clone().is_copy(self.db, locals.body.owner);
|
||||||
destination.write_from_bytes(self, &[u8::from(result)])
|
destination.write_from_bytes(self, &[u8::from(result)])
|
||||||
|
@ -817,14 +845,18 @@ impl Evaluator<'_> {
|
||||||
// FIXME: this is wrong for const eval, it should return 2 in some
|
// FIXME: this is wrong for const eval, it should return 2 in some
|
||||||
// cases.
|
// cases.
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"wrapping_add args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let ans = lhs.get(self)? == rhs.get(self)?;
|
let ans = lhs.get(self)? == rhs.get(self)?;
|
||||||
destination.write_from_bytes(self, &[u8::from(ans)])
|
destination.write_from_bytes(self, &[u8::from(ans)])
|
||||||
}
|
}
|
||||||
"saturating_add" | "saturating_sub" => {
|
"saturating_add" | "saturating_sub" => {
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("saturating_add args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"saturating_add args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
||||||
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
||||||
|
@ -844,7 +876,9 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"wrapping_add" | "unchecked_add" => {
|
"wrapping_add" | "unchecked_add" => {
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"wrapping_add args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
||||||
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
||||||
|
@ -853,7 +887,9 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"ptr_offset_from_unsigned" | "ptr_offset_from" => {
|
"ptr_offset_from_unsigned" | "ptr_offset_from" => {
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"wrapping_sub args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
|
let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
|
||||||
let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
|
let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
|
||||||
|
@ -861,8 +897,8 @@ impl Evaluator<'_> {
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"ptr_offset_from generic arg is not provided",
|
"ptr_offset_from generic arg is not provided".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128;
|
let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128;
|
||||||
|
@ -871,7 +907,9 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"wrapping_sub" | "unchecked_sub" => {
|
"wrapping_sub" | "unchecked_sub" => {
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"wrapping_sub args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
||||||
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
||||||
|
@ -880,7 +918,9 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"wrapping_mul" | "unchecked_mul" => {
|
"wrapping_mul" | "unchecked_mul" => {
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("wrapping_mul args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"wrapping_mul args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
||||||
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
||||||
|
@ -890,7 +930,9 @@ impl Evaluator<'_> {
|
||||||
"wrapping_shl" | "unchecked_shl" => {
|
"wrapping_shl" | "unchecked_shl" => {
|
||||||
// FIXME: signed
|
// FIXME: signed
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("unchecked_shl args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"unchecked_shl args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
||||||
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
||||||
|
@ -900,7 +942,9 @@ impl Evaluator<'_> {
|
||||||
"wrapping_shr" | "unchecked_shr" => {
|
"wrapping_shr" | "unchecked_shr" => {
|
||||||
// FIXME: signed
|
// FIXME: signed
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("unchecked_shr args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"unchecked_shr args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
||||||
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
||||||
|
@ -910,7 +954,9 @@ impl Evaluator<'_> {
|
||||||
"unchecked_rem" => {
|
"unchecked_rem" => {
|
||||||
// FIXME: signed
|
// FIXME: signed
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("unchecked_rem args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"unchecked_rem args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
||||||
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
||||||
|
@ -922,7 +968,9 @@ impl Evaluator<'_> {
|
||||||
"unchecked_div" | "exact_div" => {
|
"unchecked_div" | "exact_div" => {
|
||||||
// FIXME: signed
|
// FIXME: signed
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("unchecked_div args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"unchecked_div args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
|
||||||
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
|
||||||
|
@ -933,7 +981,9 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => {
|
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => {
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"const_eval_select args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let result_ty = TyKind::Tuple(
|
let result_ty = TyKind::Tuple(
|
||||||
2,
|
2,
|
||||||
|
@ -954,7 +1004,7 @@ impl Evaluator<'_> {
|
||||||
|| ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
|
|| ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
|
||||||
let is_overflow = vec![u8::from(is_overflow)];
|
let is_overflow = vec![u8::from(is_overflow)];
|
||||||
let layout = self.layout(&result_ty)?;
|
let layout = self.layout(&result_ty)?;
|
||||||
let result = self.make_by_layout(
|
let result = self.construct_with_layout(
|
||||||
layout.size.bytes_usize(),
|
layout.size.bytes_usize(),
|
||||||
&layout,
|
&layout,
|
||||||
None,
|
None,
|
||||||
|
@ -966,15 +1016,15 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"copy" | "copy_nonoverlapping" => {
|
"copy" | "copy_nonoverlapping" => {
|
||||||
let [src, dst, offset] = args else {
|
let [src, dst, offset] = args else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"copy_nonoverlapping args are not provided",
|
"copy_nonoverlapping args are not provided".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"copy_nonoverlapping generic arg is not provided",
|
"copy_nonoverlapping generic arg is not provided".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let src = Address::from_bytes(src.get(self)?)?;
|
let src = Address::from_bytes(src.get(self)?)?;
|
||||||
|
@ -988,18 +1038,22 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"offset" | "arith_offset" => {
|
"offset" | "arith_offset" => {
|
||||||
let [ptr, offset] = args else {
|
let [ptr, offset] = args else {
|
||||||
return Err(MirEvalError::TypeError("offset args are not provided"));
|
return Err(MirEvalError::InternalError("offset args are not provided".into()));
|
||||||
};
|
};
|
||||||
let ty = if name == "offset" {
|
let ty = if name == "offset" {
|
||||||
let Some(ty0) =
|
let Some(ty0) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError("offset generic arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"offset generic arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let Some(ty1) =
|
let Some(ty1) =
|
||||||
generic_args.as_slice(Interner).get(1).and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).get(1).and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError("offset generic arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"offset generic arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
if !matches!(
|
if !matches!(
|
||||||
ty1.as_builtin(),
|
ty1.as_builtin(),
|
||||||
|
@ -1008,15 +1062,15 @@ impl Evaluator<'_> {
|
||||||
| BuiltinType::Uint(BuiltinUint::Usize)
|
| BuiltinType::Uint(BuiltinUint::Usize)
|
||||||
)
|
)
|
||||||
) {
|
) {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"offset generic arg is not usize or isize",
|
"offset generic arg is not usize or isize".into(),
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
match ty0.as_raw_ptr() {
|
match ty0.as_raw_ptr() {
|
||||||
Some((ty, _)) => ty,
|
Some((ty, _)) => ty,
|
||||||
None => {
|
None => {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"offset generic arg is not a raw pointer",
|
"offset generic arg is not a raw pointer".into(),
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1024,8 +1078,8 @@ impl Evaluator<'_> {
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"arith_offset generic arg is not provided",
|
"arith_offset generic arg is not provided".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
ty
|
ty
|
||||||
|
@ -1046,19 +1100,21 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"transmute" => {
|
"transmute" => {
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError("transmute arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"transmute arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
destination.write_from_interval(self, arg.interval)
|
destination.write_from_interval(self, arg.interval)
|
||||||
}
|
}
|
||||||
"likely" | "unlikely" => {
|
"likely" | "unlikely" => {
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError("likely arg is not provided"));
|
return Err(MirEvalError::InternalError("likely arg is not provided".into()));
|
||||||
};
|
};
|
||||||
destination.write_from_interval(self, arg.interval)
|
destination.write_from_interval(self, arg.interval)
|
||||||
}
|
}
|
||||||
"ctpop" => {
|
"ctpop" => {
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError("ctpop arg is not provided"));
|
return Err(MirEvalError::InternalError("ctpop arg is not provided".into()));
|
||||||
};
|
};
|
||||||
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones();
|
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones();
|
||||||
destination
|
destination
|
||||||
|
@ -1066,7 +1122,7 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"ctlz" | "ctlz_nonzero" => {
|
"ctlz" | "ctlz_nonzero" => {
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError("ctlz arg is not provided"));
|
return Err(MirEvalError::InternalError("ctlz arg is not provided".into()));
|
||||||
};
|
};
|
||||||
let result =
|
let result =
|
||||||
u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;
|
u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;
|
||||||
|
@ -1076,7 +1132,7 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"cttz" | "cttz_nonzero" => {
|
"cttz" | "cttz_nonzero" => {
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError("cttz arg is not provided"));
|
return Err(MirEvalError::InternalError("cttz arg is not provided".into()));
|
||||||
};
|
};
|
||||||
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros();
|
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros();
|
||||||
destination
|
destination
|
||||||
|
@ -1084,7 +1140,9 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"rotate_left" => {
|
"rotate_left" => {
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("rotate_left args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"rotate_left args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = &lhs.get(self)?[0..destination.size];
|
let lhs = &lhs.get(self)?[0..destination.size];
|
||||||
let rhs = rhs.get(self)?[0] as u32;
|
let rhs = rhs.get(self)?[0] as u32;
|
||||||
|
@ -1114,7 +1172,9 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"rotate_right" => {
|
"rotate_right" => {
|
||||||
let [lhs, rhs] = args else {
|
let [lhs, rhs] = args else {
|
||||||
return Err(MirEvalError::TypeError("rotate_right args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"rotate_right args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let lhs = &lhs.get(self)?[0..destination.size];
|
let lhs = &lhs.get(self)?[0..destination.size];
|
||||||
let rhs = rhs.get(self)?[0] as u32;
|
let rhs = rhs.get(self)?[0] as u32;
|
||||||
|
@ -1144,13 +1204,15 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"discriminant_value" => {
|
"discriminant_value" => {
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError("discriminant_value arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"discriminant_value arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"discriminant_value generic arg is not provided",
|
"discriminant_value generic arg is not provided".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let addr = Address::from_bytes(arg.get(self)?)?;
|
let addr = Address::from_bytes(arg.get(self)?)?;
|
||||||
|
@ -1161,11 +1223,15 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"const_eval_select" => {
|
"const_eval_select" => {
|
||||||
let [tuple, const_fn, _] = args else {
|
let [tuple, const_fn, _] = args else {
|
||||||
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"const_eval_select args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let mut args = vec![const_fn.clone()];
|
let mut args = vec![const_fn.clone()];
|
||||||
let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else {
|
let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else {
|
||||||
return Err(MirEvalError::TypeError("const_eval_select arg[0] is not a tuple"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"const_eval_select arg[0] is not a tuple".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let layout = self.layout(&tuple.ty)?;
|
let layout = self.layout(&tuple.ty)?;
|
||||||
for (i, field) in fields.iter(Interner).enumerate() {
|
for (i, field) in fields.iter(Interner).enumerate() {
|
||||||
|
@ -1196,21 +1262,25 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"read_via_copy" | "volatile_load" => {
|
"read_via_copy" | "volatile_load" => {
|
||||||
let [arg] = args else {
|
let [arg] = args else {
|
||||||
return Err(MirEvalError::TypeError("read_via_copy args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"read_via_copy args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let addr = Address::from_bytes(arg.interval.get(self)?)?;
|
let addr = Address::from_bytes(arg.interval.get(self)?)?;
|
||||||
destination.write_from_interval(self, Interval { addr, size: destination.size })
|
destination.write_from_interval(self, Interval { addr, size: destination.size })
|
||||||
}
|
}
|
||||||
"write_via_move" => {
|
"write_via_move" => {
|
||||||
let [ptr, val] = args else {
|
let [ptr, val] = args else {
|
||||||
return Err(MirEvalError::TypeError("write_via_move args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"write_via_move args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let dst = Address::from_bytes(ptr.get(self)?)?;
|
let dst = Address::from_bytes(ptr.get(self)?)?;
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError(
|
return Err(MirEvalError::InternalError(
|
||||||
"write_via_copy generic arg is not provided",
|
"write_via_copy generic arg is not provided".into(),
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let size = self.size_of_sized(ty, locals, "write_via_move ptr type")?;
|
let size = self.size_of_sized(ty, locals, "write_via_move ptr type")?;
|
||||||
|
@ -1219,14 +1289,18 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
"write_bytes" => {
|
"write_bytes" => {
|
||||||
let [dst, val, count] = args else {
|
let [dst, val, count] = args else {
|
||||||
return Err(MirEvalError::TypeError("write_bytes args are not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"write_bytes args are not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let count = from_bytes!(usize, count.get(self)?);
|
let count = from_bytes!(usize, count.get(self)?);
|
||||||
let val = from_bytes!(u8, val.get(self)?);
|
let val = from_bytes!(u8, val.get(self)?);
|
||||||
let Some(ty) =
|
let Some(ty) =
|
||||||
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError("write_bytes generic arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"write_bytes generic arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let dst = Address::from_bytes(dst.get(self)?)?;
|
let dst = Address::from_bytes(dst.get(self)?)?;
|
||||||
let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
|
let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
|
||||||
|
@ -1310,10 +1384,14 @@ impl Evaluator<'_> {
|
||||||
|
|
||||||
let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
|
||||||
else {
|
else {
|
||||||
return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"atomic intrinsic generic arg is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let Some(arg0) = args.first() else {
|
let Some(arg0) = args.first() else {
|
||||||
return Err(MirEvalError::TypeError("atomic intrinsic arg0 is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"atomic intrinsic arg0 is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
|
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
|
||||||
let arg0_interval =
|
let arg0_interval =
|
||||||
|
@ -1322,7 +1400,9 @@ impl Evaluator<'_> {
|
||||||
return destination.write_from_interval(self, arg0_interval);
|
return destination.write_from_interval(self, arg0_interval);
|
||||||
}
|
}
|
||||||
let Some(arg1) = args.get(1) else {
|
let Some(arg1) = args.get(1) else {
|
||||||
return Err(MirEvalError::TypeError("atomic intrinsic arg1 is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"atomic intrinsic arg1 is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
if name.starts_with("store_") {
|
if name.starts_with("store_") {
|
||||||
return arg0_interval.write_from_interval(self, arg1.interval);
|
return arg0_interval.write_from_interval(self, arg1.interval);
|
||||||
|
@ -1374,7 +1454,9 @@ impl Evaluator<'_> {
|
||||||
return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
|
return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
|
||||||
}
|
}
|
||||||
let Some(arg2) = args.get(2) else {
|
let Some(arg2) = args.get(2) else {
|
||||||
return Err(MirEvalError::TypeError("atomic intrinsic arg2 is not provided"));
|
return Err(MirEvalError::InternalError(
|
||||||
|
"atomic intrinsic arg2 is not provided".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
if name.starts_with("cxchg_") || name.starts_with("cxchgweak_") {
|
if name.starts_with("cxchg_") || name.starts_with("cxchgweak_") {
|
||||||
let dest = if arg1.get(self)? == arg0_interval.get(self)? {
|
let dest = if arg1.get(self)? == arg0_interval.get(self)? {
|
||||||
|
@ -1389,7 +1471,7 @@ impl Evaluator<'_> {
|
||||||
)
|
)
|
||||||
.intern(Interner);
|
.intern(Interner);
|
||||||
let layout = self.layout(&result_ty)?;
|
let layout = self.layout(&result_ty)?;
|
||||||
let result = self.make_by_layout(
|
let result = self.construct_with_layout(
|
||||||
layout.size.bytes_usize(),
|
layout.size.bytes_usize(),
|
||||||
&layout,
|
&layout,
|
||||||
None,
|
None,
|
||||||
|
|
|
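Every hunk in the file above follows the same recipe: destructure the intrinsic's fixed-arity argument slice with a `let`-`else` slice pattern, and on mismatch return an error that now carries an owned message (the `.into()` at each call site suggests a `String`- or `Box<str>`-like payload) rather than the old `&'static str`. A minimal, self-contained sketch of that shape; `EvalError` and `eval_add` below are hypothetical stand-ins, not rust-analyzer's actual types:

// Hypothetical error type with an owned message, mirroring the
// TypeError(&'static str) -> InternalError(..into()) change above.
#[derive(Debug)]
enum EvalError {
    Internal(Box<str>),
}

fn eval_add(args: &[u128]) -> Result<u128, EvalError> {
    // Exactly two operands are expected; anything else is an internal error.
    let [lhs, rhs] = args else {
        return Err(EvalError::Internal("add args are not provided".into()));
    };
    Ok(lhs.wrapping_add(*rhs))
}

fn main() {
    assert_eq!(eval_add(&[2, 3]).unwrap(), 5);
    assert!(eval_add(&[2]).is_err());
}

The owned payload is what lets messages be built dynamically later, at the cost of an allocation on the error path.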
@@ -10,7 +10,7 @@ macro_rules! from_bytes {
 ($ty:tt, $value:expr) => {
 ($ty::from_le_bytes(match ($value).try_into() {
 Ok(it) => it,
-Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
+Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())),
 }))
 };
 }
@@ -40,7 +40,9 @@ impl Evaluator<'_> {
 .substitute(Interner, subst);
 return Ok((fields.len(), field_ty));
 }
-return Err(MirEvalError::TypeError("simd type with no len param"));
+return Err(MirEvalError::InternalError(
+"simd type with no len param".into(),
+));
 }
 };
 match try_const_usize(self.db, len) {
@@ -48,14 +50,18 @@ impl Evaluator<'_> {
 let Some(ty) =
 subst.as_slice(Interner).first().and_then(|it| it.ty(Interner))
 else {
-return Err(MirEvalError::TypeError("simd type with no ty param"));
+return Err(MirEvalError::InternalError(
+"simd type with no ty param".into(),
+));
 };
 Ok((len as usize, ty.clone()))
 }
-None => Err(MirEvalError::TypeError("simd type with unevaluatable len param")),
+None => Err(MirEvalError::InternalError(
+"simd type with unevaluatable len param".into(),
+)),
 }
 }
-_ => Err(MirEvalError::TypeError("simd type which is not a struct")),
+_ => Err(MirEvalError::InternalError("simd type which is not a struct".into())),
 }
 }
 
@@ -71,7 +77,9 @@ impl Evaluator<'_> {
 match name {
 "and" | "or" | "xor" => {
 let [left, right] = args else {
-return Err(MirEvalError::TypeError("simd bit op args are not provided"));
+return Err(MirEvalError::InternalError(
+"simd bit op args are not provided".into(),
+));
 };
 let result = left
 .get(self)?
@@ -88,7 +96,7 @@ impl Evaluator<'_> {
 }
 "eq" | "ne" | "lt" | "le" | "gt" | "ge" => {
 let [left, right] = args else {
-return Err(MirEvalError::TypeError("simd args are not provided"));
+return Err(MirEvalError::InternalError("simd args are not provided".into()));
 };
 let (len, ty) = self.detect_simd_ty(&left.ty)?;
 let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
@@ -125,7 +133,9 @@ impl Evaluator<'_> {
 }
 "bitmask" => {
 let [op] = args else {
-return Err(MirEvalError::TypeError("simd_bitmask args are not provided"));
+return Err(MirEvalError::InternalError(
+"simd_bitmask args are not provided".into(),
+));
 };
 let (op_len, _) = self.detect_simd_ty(&op.ty)?;
 let op_count = op.interval.size / op_len;
@@ -139,18 +149,20 @@ impl Evaluator<'_> {
 }
 "shuffle" => {
 let [left, right, index] = args else {
-return Err(MirEvalError::TypeError("simd_shuffle args are not provided"));
+return Err(MirEvalError::InternalError(
+"simd_shuffle args are not provided".into(),
+));
 };
 let TyKind::Array(_, index_len) = index.ty.kind(Interner) else {
-return Err(MirEvalError::TypeError(
-"simd_shuffle index argument has non-array type",
+return Err(MirEvalError::InternalError(
+"simd_shuffle index argument has non-array type".into(),
 ));
 };
 let index_len = match try_const_usize(self.db, index_len) {
 Some(it) => it as usize,
 None => {
-return Err(MirEvalError::TypeError(
-"simd type with unevaluatable len param",
+return Err(MirEvalError::InternalError(
+"simd type with unevaluatable len param".into(),
 ))
 }
 };
@@ -164,8 +176,8 @@ impl Evaluator<'_> {
 let val = match vector.clone().nth(index) {
 Some(it) => it,
 None => {
-return Err(MirEvalError::TypeError(
-"out of bound access in simd shuffle",
+return Err(MirEvalError::InternalError(
+"out of bound access in simd shuffle".into(),
 ))
 }
 };

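The `from_bytes!` macro patched at the top of this file decodes a little-endian byte slice into a fixed-width value, returning early from the enclosing function when the slice length doesn't fit the target type. A runnable approximation of the same idea, using a plain `String` error in place of `MirEvalError`:

macro_rules! from_bytes {
    ($ty:ty, $value:expr) => {
        // try_into() converts &[u8] into [u8; N]; it fails (and we bail out
        // of the *calling* function) when the slice length is not exactly N.
        <$ty>::from_le_bytes(match ($value).try_into() {
            Ok(it) => it,
            Err(_) => return Err("mismatched size".to_string()),
        })
    };
}

fn read_usize(bytes: &[u8]) -> Result<usize, String> {
    Ok(from_bytes!(usize, bytes))
}

fn main() {
    assert_eq!(read_usize(&42usize.to_le_bytes()), Ok(42));
    assert!(read_usize(&[1, 2, 3]).is_err());
}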
@@ -97,7 +97,7 @@ pub enum MirLowerError {
 MutatingRvalue,
 UnresolvedLabel,
 UnresolvedUpvar(Place),
-UnaccessableLocal,
+InaccessibleLocal,
 
 // monomorphization errors:
 GenericArgNotProvided(TypeOrConstParamId, Substitution),
@@ -116,7 +116,7 @@ impl DropScopeToken {
 ctx.pop_drop_scope_internal(current, span)
 }
 
-/// It is useful when we want a drop scope is syntaxically closed, but we don't want to execute any drop
+/// It is useful when we want a drop scope is syntactically closed, but we don't want to execute any drop
 /// code. Either when the control flow is diverging (so drop code doesn't reached) or when drop is handled
 /// for us (for example a block that ended with a return statement. Return will drop everything, so the block shouldn't
 /// do anything)
@@ -186,7 +186,7 @@ impl MirLowerError {
 | MirLowerError::UnsizedTemporary(_)
 | MirLowerError::IncompleteExpr
 | MirLowerError::IncompletePattern
-| MirLowerError::UnaccessableLocal
+| MirLowerError::InaccessibleLocal
 | MirLowerError::TraitFunctionDefinition(_, _)
 | MirLowerError::UnresolvedName(_)
 | MirLowerError::RecordLiteralWithoutPath
@@ -939,7 +939,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
 Ok(Some(current))
 }
 Expr::BinaryOp { lhs, rhs, op } => {
-let op = op.ok_or(MirLowerError::IncompleteExpr)?;
+let op: BinaryOp = op.ok_or(MirLowerError::IncompleteExpr)?;
 let is_builtin = 'b: {
 // Without adjust here is a hack. We assume that we know every possible adjustment
 // for binary operator, and use without adjust to simplify our conditions.
@@ -1843,8 +1843,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
 None => {
 // FIXME: It should never happens, but currently it will happen in `const_dependent_on_local` test, which
 // is a hir lowering problem IMO.
-// never!("Using unaccessable local for binding is always a bug");
-Err(MirLowerError::UnaccessableLocal)
+// never!("Using inaccessible local for binding is always a bug");
+Err(MirLowerError::InaccessibleLocal)
 }
 }
 }
@@ -2068,7 +2068,7 @@ pub fn mir_body_for_closure_query(
 }
 
 pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<MirBody>> {
-let _p = profile::span("mir_body_query").detail(|| match def {
+let detail = match def {
 DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(),
 DefWithBodyId::StaticId(it) => db.static_data(it).name.display(db.upcast()).to_string(),
 DefWithBodyId::ConstId(it) => db
@@ -2082,7 +2082,8 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
 db.enum_variant_data(it).name.display(db.upcast()).to_string()
 }
 DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
-});
+};
+let _p = tracing::span!(tracing::Level::INFO, "mir_body_query", ?detail).entered();
 let body = db.body(def);
 let infer = db.infer(def);
 let mut result = lower_to_mir(db, def, &body, &infer, body.body_expr)?;

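Both profiling changes in this file follow the same recipe: compute the human-readable `detail` string first, then record it as a `Debug` field on an entered `tracing` span, replacing the old `profile::span(..).detail(..)` helper. A minimal sketch of the pattern; the function and detail string here are illustrative, while `tracing` is the real crate being adopted:

fn mir_body_demo(def_name: &str) {
    let detail = format!("lowering {def_name}");
    // `?detail` records the field via its Debug impl; `entered()` keeps the
    // span active until `_p` is dropped at the end of the scope.
    let _p = tracing::span!(tracing::Level::INFO, "mir_body_query", ?detail).entered();
    // ... the actual work happens inside the span ...
}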
@@ -114,7 +114,7 @@ impl MirLowerCtx<'_> {
 index: i as u32,
 }))
 }),
-&mut cond_place,
+&cond_place,
 mode,
 )?
 }

@@ -1,14 +1,14 @@
-mod never_type;
 mod coercion;
-mod regression;
-mod simple;
-mod patterns;
-mod traits;
-mod method_resolution;
-mod macros;
+mod diagnostics;
 mod display_source_code;
 mod incremental;
-mod diagnostics;
+mod macros;
+mod method_resolution;
+mod never_type;
+mod patterns;
+mod regression;
+mod simple;
+mod traits;
 
 use std::{collections::HashMap, env};
 
@@ -3424,7 +3424,7 @@ fn bin_op_with_rhs_is_self_for_assoc_bound() {
 fn repro<T>(t: T) -> bool
 where
 T: Request,
-T::Output: Convertable,
+T::Output: Convertible,
 {
 let a = execute(&t).convert();
 let b = execute(&t).convert();
@@ -3439,7 +3439,7 @@ where
 {
 <T as Request>::output()
 }
-trait Convertable {
+trait Convertible {
 type TraitSelf: PartialEq<Self::TraitSelf>;
 type AssocAsDefaultSelf: PartialEq;
 fn convert(self) -> Self::AssocAsDefaultSelf;

@@ -100,13 +100,14 @@ pub(crate) fn trait_solve_query(
 block: Option<BlockId>,
 goal: Canonical<InEnvironment<Goal>>,
 ) -> Option<Solution> {
-let _p = profile::span("trait_solve_query").detail(|| match &goal.value.goal.data(Interner) {
+let detail = match &goal.value.goal.data(Interner) {
 GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
 db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string()
 }
 GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(),
 _ => "??".to_string(),
-});
+};
+let _p = tracing::span!(tracing::Level::INFO, "trait_solve_query", ?detail).entered();
 tracing::info!("trait_solve_query({:?})", goal.value.goal);
 
 if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq {

@@ -24,18 +24,18 @@ use hir_def::{
 };
 use hir_expand::name::Name;
 use intern::Interned;
+use rustc_abi::TargetDataLayout;
 use rustc_hash::FxHashSet;
 use smallvec::{smallvec, SmallVec};
 use stdx::never;
-use triomphe::Arc;
 
 use crate::{
 consteval::unknown_const,
 db::HirDatabase,
 layout::{Layout, TagEncoding},
 mir::pad16,
-ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitEnvironment,
-TraitRef, TraitRefExt, Ty, WhereClause,
+ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitRef, TraitRefExt,
+Ty, WhereClause,
 };
 
 pub(crate) fn fn_traits(
@@ -192,7 +192,7 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
 /// and it doesn't store the closure types and fields.
 ///
 /// Codes should not assume this ordering, and should always use methods available
-/// on this struct for retriving, and `TyBuilder::substs_for_closure` for creating.
+/// on this struct for retrieving, and `TyBuilder::substs_for_closure` for creating.
 pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution);
 
 impl<'a> ClosureSubst<'a> {
@@ -431,18 +431,16 @@ impl FallibleTypeFolder<Interner> for UnevaluatedConstEvaluatorFolder<'_> {
 pub(crate) fn detect_variant_from_bytes<'a>(
 layout: &'a Layout,
 db: &dyn HirDatabase,
-trait_env: Arc<TraitEnvironment>,
+target_data_layout: &TargetDataLayout,
 b: &[u8],
 e: EnumId,
 ) -> Option<(EnumVariantId, &'a Layout)> {
-let krate = trait_env.krate;
 let (var_id, var_layout) = match &layout.variants {
 hir_def::layout::Variants::Single { index } => {
 (db.enum_data(e).variants[index.0].0, layout)
 }
 hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => {
-let target_data_layout = db.target_data_layout(krate)?;
-let size = tag.size(&*target_data_layout).bytes_usize();
+let size = tag.size(target_data_layout).bytes_usize();
 let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
 let tag = i128::from_le_bytes(pad16(&b[offset..offset + size], false));
 match tag_encoding {

@@ -17,6 +17,7 @@ either.workspace = true
 arrayvec.workspace = true
 itertools.workspace = true
 smallvec.workspace = true
+tracing.workspace = true
 triomphe.workspace = true
 once_cell = "1.17.1"
 
@@ -30,9 +31,10 @@ profile.workspace = true
 stdx.workspace = true
 syntax.workspace = true
 tt.workspace = true
+span.workspace = true
 
 [features]
 in-rust-tree = []
 
 [lints]
 workspace = true

@@ -2,7 +2,6 @@
 
 use std::ops::ControlFlow;
 
-use base_db::FileId;
 use hir_def::{
     attr::AttrsWithOwner,
     item_scope::ItemInNs,
@@ -11,12 +10,8 @@ use hir_def::{
     resolver::{HasResolver, Resolver, TypeNs},
     AssocItemId, AttrDefId, ModuleDefId,
 };
-use hir_expand::{
-    name::Name,
-    span_map::{RealSpanMap, SpanMapRef},
-};
+use hir_expand::{mod_path::PathKind, name::Name};
 use hir_ty::{db::HirDatabase, method_resolution};
-use syntax::{ast, AstNode};
 
 use crate::{
     Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@@ -129,7 +124,7 @@ fn resolve_doc_path_on_(
         AttrDefId::GenericParamId(_) => return None,
     };
 
-    let mut modpath = modpath_from_str(db, link)?;
+    let mut modpath = modpath_from_str(link)?;
 
     let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
     if resolved.is_none() {
@@ -244,10 +239,9 @@ fn resolve_impl_trait_item(
 ) -> Option<DocLinkDef> {
     let canonical = ty.canonical();
     let krate = ty.krate(db);
-    let environment = resolver.generic_def().map_or_else(
-        || crate::TraitEnvironment::empty(krate.id).into(),
-        |d| db.trait_environment(d),
-    );
+    let environment = resolver
+        .generic_def()
+        .map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
     let traits_in_scope = resolver.traits_in_scope(db.upcast());
 
     let mut result = None;
@@ -302,37 +296,40 @@ fn as_module_def_if_namespace_matches(
         AssocItem::TypeAlias(it) => (ModuleDef::TypeAlias(it), Namespace::Types),
     };
 
-    (ns.unwrap_or(expected_ns) == expected_ns).then(|| DocLinkDef::ModuleDef(def))
+    (ns.unwrap_or(expected_ns) == expected_ns).then_some(DocLinkDef::ModuleDef(def))
 }
 
-fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
+fn modpath_from_str(link: &str) -> Option<ModPath> {
     // FIXME: this is not how we should get a mod path here.
     let try_get_modpath = |link: &str| {
-        let ast_path = ast::SourceFile::parse(&format!("type T = {link};"))
-            .syntax_node()
-            .descendants()
-            .find_map(ast::Path::cast)?;
-        if ast_path.syntax().text() != link {
-            return None;
-        }
-        ModPath::from_src(
-            db.upcast(),
-            ast_path,
-            SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::BOGUS)),
-        )
+        let mut parts = link.split("::");
+        let mut first_segment = None;
+        let kind = match parts.next()? {
+            "" => PathKind::Abs,
+            "crate" => PathKind::Crate,
+            "self" => PathKind::Super(0),
+            "super" => {
+                let mut deg = 1;
+                for segment in parts.by_ref() {
+                    if segment == "super" {
+                        deg += 1;
+                    } else {
+                        first_segment = Some(segment);
+                        break;
+                    }
+                }
+                PathKind::Super(deg)
+            }
+            segment => {
+                first_segment = Some(segment);
+                PathKind::Plain
+            }
+        };
+        let parts = first_segment.into_iter().chain(parts).map(|segment| match segment.parse() {
+            Ok(idx) => Name::new_tuple_field(idx),
+            Err(_) => Name::new_text_dont_use(segment.into()),
+        });
+        Some(ModPath::from_segments(kind, parts))
     };
-
-    let full = try_get_modpath(link);
-    if full.is_some() {
-        return full;
-    }
-
-    // Tuple field names cannot be a part of `ModPath` usually, but rustdoc can
-    // resolve doc paths like `TupleStruct::0`.
-    // FIXME: Find a better way to handle these.
-    let (base, maybe_tuple_field) = link.rsplit_once("::")?;
-    let tuple_field = Name::new_tuple_field(maybe_tuple_field.parse().ok()?);
-    let mut modpath = try_get_modpath(base)?;
-    modpath.push_segment(tuple_field);
-    Some(modpath)
+    try_get_modpath(link)
 }
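
The rewritten `modpath_from_str` above stops round-tripping doc links through the Rust parser; it splits the link on `::` and classifies the first segment into a `PathKind`, treating purely numeric segments as tuple-field names. The classification step, reduced to plain strings (an illustrative sketch with its own toy `Kind` enum, independent of rust-analyzer's types):

// Classify the leading segment of a `::`-separated doc link.
#[derive(Debug, PartialEq)]
enum Kind {
    Abs,          // leading "::"
    Crate,        // "crate::..."
    SelfMod,      // "self::..." (PathKind::Super(0) above)
    Super(usize), // one count per leading "super"
    Plain,
}

fn classify(link: &str) -> Option<Kind> {
    let mut parts = link.split("::");
    Some(match parts.next()? {
        "" => Kind::Abs,
        "crate" => Kind::Crate,
        "self" => Kind::SelfMod,
        "super" => Kind::Super(1 + parts.take_while(|s| *s == "super").count()),
        _ => Kind::Plain,
    })
}

fn main() {
    assert_eq!(classify("::std::mem"), Some(Kind::Abs));
    assert_eq!(classify("super::super::foo"), Some(Kind::Super(2)));
    assert_eq!(classify("Vec::new"), Some(Kind::Plain));
}
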
@@ -4,11 +4,12 @@
 //! This probably isn't the best way to do this -- ideally, diagnostics should
 //! be expressed in terms of hir types themselves.
 pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
+use hir_ty::{db::HirDatabase, diagnostics::BodyValidationDiagnostic, InferenceDiagnostic};
 
 use base_db::CrateId;
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
-use hir_def::{path::ModPath, AssocItemId};
+use hir_def::{body::SyntheticSyntax, hir::ExprOrPatId, path::ModPath, AssocItemId, DefWithBodyId};
 use hir_expand::{name::Name, HirFileId, InFile};
 use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
 
@@ -30,14 +31,28 @@ macro_rules! diagnostics {
     )*
     };
 }
+// FIXME Accept something like the following in the macro call instead
+// diagnostics![
+// pub struct BreakOutsideOfLoop {
+//     pub expr: InFile<AstPtr<ast::Expr>>,
+//     pub is_break: bool,
+//     pub bad_value_break: bool,
+// }, ...
+// or more concisely
+// BreakOutsideOfLoop {
+//     expr: InFile<AstPtr<ast::Expr>>,
+//     is_break: bool,
+//     bad_value_break: bool,
+// }, ...
+// ]
 
 diagnostics![
     BreakOutsideOfLoop,
     ExpectedFunction,
     InactiveCode,
+    IncoherentImpl,
     IncorrectCase,
     InvalidDeriveTarget,
-    IncoherentImpl,
     MacroDefError,
     MacroError,
     MacroExpansionParseError,
@@ -55,8 +70,8 @@ diagnostics![
     ReplaceFilterMapNextWithFindMap,
     TraitImplIncorrectSafety,
     TraitImplMissingAssocItems,
-    TraitImplRedundantAssocItems,
     TraitImplOrphan,
+    TraitImplRedundantAssocItems,
     TypedHole,
     TypeMismatch,
     UndeclaredLabel,
@@ -326,3 +341,219 @@ pub struct TraitImplRedundantAssocItems {
     pub impl_: AstPtr<ast::Impl>,
     pub assoc_item: (Name, AssocItem),
 }
+
+impl AnyDiagnostic {
+    pub(crate) fn body_validation_diagnostic(
+        db: &dyn HirDatabase,
+        diagnostic: BodyValidationDiagnostic,
+        source_map: &hir_def::body::BodySourceMap,
+    ) -> Option<AnyDiagnostic> {
+        match diagnostic {
+            BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => {
+                let variant_data = variant.variant_data(db.upcast());
+                let missed_fields = missed_fields
+                    .into_iter()
+                    .map(|idx| variant_data.fields()[idx].name.clone())
+                    .collect();
+
+                match record {
+                    Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
+                        Ok(source_ptr) => {
+                            let root = source_ptr.file_syntax(db.upcast());
+                            if let ast::Expr::RecordExpr(record_expr) =
+                                source_ptr.value.to_node(&root)
+                            {
+                                if record_expr.record_expr_field_list().is_some() {
+                                    let field_list_parent_path =
+                                        record_expr.path().map(|path| AstPtr::new(&path));
+                                    return Some(
+                                        MissingFields {
+                                            file: source_ptr.file_id,
+                                            field_list_parent: AstPtr::new(&Either::Left(
+                                                record_expr,
+                                            )),
+                                            field_list_parent_path,
+                                            missed_fields,
+                                        }
+                                        .into(),
+                                    );
+                                }
+                            }
+                        }
+                        Err(SyntheticSyntax) => (),
+                    },
+                    Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
+                        Ok(source_ptr) => {
+                            if let Some(ptr) = source_ptr.value.cast::<ast::RecordPat>() {
+                                let root = source_ptr.file_syntax(db.upcast());
+                                let record_pat = ptr.to_node(&root);
+                                if record_pat.record_pat_field_list().is_some() {
+                                    let field_list_parent_path =
+                                        record_pat.path().map(|path| AstPtr::new(&path));
+                                    return Some(
+                                        MissingFields {
+                                            file: source_ptr.file_id,
+                                            field_list_parent: AstPtr::new(&Either::Right(
+                                                record_pat,
+                                            )),
+                                            field_list_parent_path,
+                                            missed_fields,
+                                        }
+                                        .into(),
+                                    );
+                                }
+                            }
+                        }
+                        Err(SyntheticSyntax) => (),
+                    },
+                }
+            }
+            BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
+                if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
+                    return Some(
+                        ReplaceFilterMapNextWithFindMap {
+                            file: next_source_ptr.file_id,
+                            next_expr: next_source_ptr.value,
+                        }
+                        .into(),
+                    );
+                }
+            }
+            BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
+                match source_map.expr_syntax(match_expr) {
+                    Ok(source_ptr) => {
+                        let root = source_ptr.file_syntax(db.upcast());
+                        if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
+                            match match_expr.expr() {
+                                Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
+                                    return Some(
+                                        MissingMatchArms {
+                                            scrutinee_expr: InFile::new(
+                                                source_ptr.file_id,
+                                                AstPtr::new(&scrut_expr),
+                                            ),
+                                            uncovered_patterns,
+                                        }
+                                        .into(),
+                                    );
+                                }
+                                _ => {}
+                            }
+                        }
+                    }
+                    Err(SyntheticSyntax) => (),
+                }
+            }
+        }
+        None
+    }
+
+    pub(crate) fn inference_diagnostic(
+        db: &dyn HirDatabase,
+        def: DefWithBodyId,
+        d: &InferenceDiagnostic,
+        source_map: &hir_def::body::BodySourceMap,
+    ) -> Option<AnyDiagnostic> {
+        let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
+        let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
+        Some(match d {
+            &InferenceDiagnostic::NoSuchField { field: expr, private } => {
+                let expr_or_pat = match expr {
+                    ExprOrPatId::ExprId(expr) => {
+                        source_map.field_syntax(expr).map(AstPtr::wrap_left)
+                    }
+                    ExprOrPatId::PatId(pat) => {
+                        source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
+                    }
+                };
+                NoSuchField { field: expr_or_pat, private }.into()
+            }
+            &InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
+                MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }.into()
+            }
+            &InferenceDiagnostic::PrivateField { expr, field } => {
+                let expr = expr_syntax(expr);
+                let field = field.into();
+                PrivateField { expr, field }.into()
+            }
+            &InferenceDiagnostic::PrivateAssocItem { id, item } => {
+                let expr_or_pat = match id {
+                    ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+                    ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
+                };
+                let item = item.into();
+                PrivateAssocItem { expr_or_pat, item }.into()
+            }
+            InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
+                let call_expr = expr_syntax(*call_expr);
+                ExpectedFunction { call: call_expr, found: Type::new(db, def, found.clone()) }
+                    .into()
+            }
+            InferenceDiagnostic::UnresolvedField {
+                expr,
+                receiver,
+                name,
+                method_with_same_name_exists,
+            } => {
+                let expr = expr_syntax(*expr);
+                UnresolvedField {
+                    expr,
+                    name: name.clone(),
+                    receiver: Type::new(db, def, receiver.clone()),
+                    method_with_same_name_exists: *method_with_same_name_exists,
+                }
+                .into()
+            }
+            InferenceDiagnostic::UnresolvedMethodCall {
+                expr,
+                receiver,
+                name,
+                field_with_same_name,
+                assoc_func_with_same_name,
+            } => {
+                let expr = expr_syntax(*expr);
+                UnresolvedMethodCall {
+                    expr,
+                    name: name.clone(),
+                    receiver: Type::new(db, def, receiver.clone()),
+                    field_with_same_name: field_with_same_name
+                        .clone()
+                        .map(|ty| Type::new(db, def, ty)),
+                    assoc_func_with_same_name: *assoc_func_with_same_name,
+                }
+                .into()
+            }
+            &InferenceDiagnostic::UnresolvedAssocItem { id } => {
+                let expr_or_pat = match id {
+                    ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+                    ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
+                };
+                UnresolvedAssocItem { expr_or_pat }.into()
+            }
+            &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => {
+                let expr = expr_syntax(expr);
+                BreakOutsideOfLoop { expr, is_break, bad_value_break }.into()
+            }
+            InferenceDiagnostic::TypedHole { expr, expected } => {
+                let expr = expr_syntax(*expr);
+                TypedHole { expr, expected: Type::new(db, def, expected.clone()) }.into()
+            }
+            &InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => {
+                let expr_or_pat = match pat {
+                    ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+                    ExprOrPatId::PatId(pat) => {
+                        let InFile { file_id, value } =
+                            source_map.pat_syntax(pat).expect("unexpected synthetic");
+
+                        // cast from Either<Pat, SelfParam> -> Either<_, Pat>
+                        let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
+                            return None;
+                        };
+                        InFile { file_id, value: ptr }
+                    }
+                };
+                MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into()
+            }
+        })
+    }
+}
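
The `impl AnyDiagnostic` block added above turns the old inline `match` arms into constructor functions that return `Option<AnyDiagnostic>`, so callers can `acc.extend(..)` and diagnostics that cannot be mapped back to syntax simply yield `None`. A toy sketch of that calling convention (the `Diag` type and `make_diag` helper are hypothetical, not the real API):

// Returning Option<T> from a constructor lets the caller use Vec::extend,
// because Option implements IntoIterator.
enum Diag {
    Broken(u32),
}

fn make_diag(raw: i64) -> Option<Diag> {
    // Inputs that cannot be converted are skipped by returning None.
    u32::try_from(raw).ok().map(Diag::Broken)
}

fn main() {
    let mut acc: Vec<Diag> = Vec::new();
    for raw in [1i64, -3, 7] {
        acc.extend(make_diag(raw)); // None adds nothing
    }
    assert_eq!(acc.len(), 2);
}
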
@@ -24,12 +24,12 @@
 mod semantics;
 mod source_analyzer;
 
-mod from_id;
 mod attrs;
+mod from_id;
 mod has_source;
 
-pub mod diagnostics;
 pub mod db;
+pub mod diagnostics;
 pub mod symbols;
 
 mod display;
@@ -61,7 +61,7 @@ use hir_def::{
 use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind};
 use hir_ty::{
     all_super_traits, autoderef, check_orphan_rules,
-    consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
+    consteval::{try_const_usize, unknown_const_as_generic, ConstExt},
     diagnostics::BodyValidationDiagnostic,
     known_const_to_ast,
     layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
@@ -76,7 +76,6 @@ use hir_ty::{
 };
 use itertools::Itertools;
 use nameres::diagnostics::DefDiagnosticKind;
-use once_cell::unsync::Lazy;
 use rustc_hash::FxHashSet;
 use stdx::{impl_from, never};
 use syntax::{
@@ -132,8 +131,10 @@ pub use {
         MacroFileIdExt,
     },
     hir_ty::{
+        consteval::ConstEvalError,
         display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
         layout::LayoutError,
+        mir::{MirEvalError, MirLowerError},
         PointerCast, Safety,
     },
     // FIXME: Properly encapsulate mir
@@ -234,8 +235,8 @@ impl Crate {
         db: &dyn DefDatabase,
         query: import_map::Query,
     ) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
-        let _p = profile::span("query_external_importables");
-        import_map::search_dependencies(db, self.into(), query).into_iter().map(|item| {
+        let _p = tracing::span!(tracing::Level::INFO, "query_external_importables");
+        import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| {
             match ItemInNs::from(item) {
                 ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
                 ItemInNs::Macros(mac_id) => Either::Right(mac_id),
@@ -538,13 +539,8 @@ impl Module {
 
     /// Fills `acc` with the module's diagnostics.
     pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
-        let _p = profile::span("Module::diagnostics").detail(|| {
-            format!(
-                "{:?}",
-                self.name(db)
-                    .map_or("<unknown>".into(), |name| name.display(db.upcast()).to_string())
-            )
-        });
+        let name = self.name(db);
+        let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", ?name);
         let def_map = self.id.def_map(db.upcast());
         for diag in def_map.diagnostics() {
             if diag.in_module != self.id.local_id {
@@ -907,7 +903,7 @@ fn emit_def_diagnostic_(
         }
         DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
             let node = ast.to_node(db.upcast());
-            let derive = node.attrs().nth(*id as usize);
+            let derive = node.attrs().nth(*id);
             match derive {
                 Some(derive) => {
                     acc.push(
@@ -922,7 +918,7 @@ fn emit_def_diagnostic_(
         }
         DefDiagnosticKind::MalformedDerive { ast, id } => {
             let node = ast.to_node(db.upcast());
-            let derive = node.attrs().nth(*id as usize);
+            let derive = node.attrs().nth(*id);
             match derive {
                 Some(derive) => {
                     acc.push(
@@ -1592,181 +1588,43 @@ impl DefWithBody {
         }
 
         for diag in source_map.diagnostics() {
-            match diag {
-                BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push(
-                    InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into(),
-                ),
-                BodyDiagnostic::MacroError { node, message } => acc.push(
-                    MacroError {
-                        node: (*node).map(|it| it.into()),
-                        precise_location: None,
-                        message: message.to_string(),
-                    }
-                    .into(),
-                ),
-                BodyDiagnostic::UnresolvedProcMacro { node, krate } => acc.push(
-                    UnresolvedProcMacro {
-                        node: (*node).map(|it| it.into()),
-                        precise_location: None,
-                        macro_name: None,
-                        kind: MacroKind::ProcMacro,
-                        krate: *krate,
-                    }
-                    .into(),
-                ),
-                BodyDiagnostic::UnresolvedMacroCall { node, path } => acc.push(
-                    UnresolvedMacroCall {
-                        macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
-                        precise_location: None,
-                        path: path.clone(),
-                        is_bang: true,
-                    }
-                    .into(),
-                ),
+            acc.push(match diag {
+                BodyDiagnostic::InactiveCode { node, cfg, opts } => {
+                    InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
+                }
+                BodyDiagnostic::MacroError { node, message } => MacroError {
+                    node: (*node).map(|it| it.into()),
+                    precise_location: None,
+                    message: message.to_string(),
+                }
+                .into(),
+                BodyDiagnostic::UnresolvedProcMacro { node, krate } => UnresolvedProcMacro {
+                    node: (*node).map(|it| it.into()),
+                    precise_location: None,
+                    macro_name: None,
+                    kind: MacroKind::ProcMacro,
+                    krate: *krate,
+                }
+                .into(),
+                BodyDiagnostic::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
+                    macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
+                    precise_location: None,
+                    path: path.clone(),
+                    is_bang: true,
+                }
+                .into(),
                 BodyDiagnostic::UnreachableLabel { node, name } => {
-                    acc.push(UnreachableLabel { node: *node, name: name.clone() }.into())
+                    UnreachableLabel { node: *node, name: name.clone() }.into()
                 }
                 BodyDiagnostic::UndeclaredLabel { node, name } => {
-                    acc.push(UndeclaredLabel { node: *node, name: name.clone() }.into())
+                    UndeclaredLabel { node: *node, name: name.clone() }.into()
                 }
-            }
+            });
         }
 
         let infer = db.infer(self.into());
-        let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
-        let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
-        let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
         for d in &infer.diagnostics {
-            match d {
-                &hir_ty::InferenceDiagnostic::NoSuchField { field: expr, private } => {
-                    let expr_or_pat = match expr {
-                        ExprOrPatId::ExprId(expr) => {
-                            source_map.field_syntax(expr).map(AstPtr::wrap_left)
-                        }
-                        ExprOrPatId::PatId(pat) => {
-                            source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
-                        }
-                    };
-                    acc.push(NoSuchField { field: expr_or_pat, private }.into())
-                }
-                &hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
-                    acc.push(
-                        MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }
-                            .into(),
-                    )
-                }
-                &hir_ty::InferenceDiagnostic::PrivateField { expr, field } => {
-                    let expr = expr_syntax(expr);
-                    let field = field.into();
-                    acc.push(PrivateField { expr, field }.into())
-                }
-                &hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
-                    let expr_or_pat = match id {
-                        ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
-                        ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
-                    };
-                    let item = item.into();
-                    acc.push(PrivateAssocItem { expr_or_pat, item }.into())
-                }
-                hir_ty::InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
-                    let call_expr = expr_syntax(*call_expr);
-
-                    acc.push(
-                        ExpectedFunction {
-                            call: call_expr,
-                            found: Type::new(db, DefWithBodyId::from(self), found.clone()),
-                        }
-                        .into(),
-                    )
-                }
-                hir_ty::InferenceDiagnostic::UnresolvedField {
-                    expr,
-                    receiver,
-                    name,
-                    method_with_same_name_exists,
-                } => {
-                    let expr = expr_syntax(*expr);
-
-                    acc.push(
-                        UnresolvedField {
-                            expr,
-                            name: name.clone(),
-                            receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
-                            method_with_same_name_exists: *method_with_same_name_exists,
-                        }
-                        .into(),
-                    )
-                }
-                hir_ty::InferenceDiagnostic::UnresolvedMethodCall {
-                    expr,
-                    receiver,
-                    name,
-                    field_with_same_name,
-                    assoc_func_with_same_name,
-                } => {
-                    let expr = expr_syntax(*expr);
-
-                    acc.push(
-                        UnresolvedMethodCall {
-                            expr,
-                            name: name.clone(),
-                            receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
-                            field_with_same_name: field_with_same_name
-                                .clone()
-                                .map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
-                            assoc_func_with_same_name: *assoc_func_with_same_name,
-                        }
-                        .into(),
-                    )
-                }
-                &hir_ty::InferenceDiagnostic::UnresolvedAssocItem { id } => {
-                    let expr_or_pat = match id {
-                        ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
-                        ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
-                    };
-                    acc.push(UnresolvedAssocItem { expr_or_pat }.into())
-                }
-                &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop {
-                    expr,
-                    is_break,
-                    bad_value_break,
-                } => {
-                    let expr = expr_syntax(expr);
-                    acc.push(BreakOutsideOfLoop { expr, is_break, bad_value_break }.into())
-                }
-                hir_ty::InferenceDiagnostic::TypedHole { expr, expected } => {
-                    let expr = expr_syntax(*expr);
-                    acc.push(
-                        TypedHole {
-                            expr,
-                            expected: Type::new(db, DefWithBodyId::from(self), expected.clone()),
-                        }
-                        .into(),
-                    )
-                }
-                &hir_ty::InferenceDiagnostic::MismatchedTupleStructPatArgCount {
-                    pat,
-                    expected,
-                    found,
-                } => {
-                    let expr_or_pat = match pat {
-                        ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
-                        ExprOrPatId::PatId(pat) => {
-                            let InFile { file_id, value } =
-                                source_map.pat_syntax(pat).expect("unexpected synthetic");
-
-                            // cast from Either<Pat, SelfParam> -> Either<_, Pat>
-                            let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
-                                continue;
-                            };
-                            InFile { file_id, value: ptr }
-                        }
-                    };
-                    acc.push(
-                        MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into(),
-                    )
-                }
-            }
+            acc.extend(AnyDiagnostic::inference_diagnostic(db, self.into(), d, &source_map));
         }
         for (pat_or_expr, mismatch) in infer.type_mismatches() {
             let expr_or_pat = match pat_or_expr {
@@ -1805,8 +1663,6 @@ impl DefWithBody {
             }
         }
 
-        let hir_body = db.body(self.into());
-
         if let Ok(borrowck_results) = db.borrowck(self.into()) {
             for borrowck_result in borrowck_results.iter() {
                 let mir_body = &borrowck_result.mir_body;
@@ -1828,7 +1684,7 @@ impl DefWithBody {
                     )
                 }
                 let mol = &borrowck_result.mutability_of_locals;
-                for (binding_id, binding_data) in hir_body.bindings.iter() {
+                for (binding_id, binding_data) in body.bindings.iter() {
                     if binding_data.problems.is_some() {
                         // We should report specific diagnostics for these problems, not `need-mut` and `unused-mut`.
                         continue;
@@ -1890,109 +1746,7 @@ impl DefWithBody {
         }
 
        for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
-            match diagnostic {
-                BodyValidationDiagnostic::RecordMissingFields {
-                    record,
-                    variant,
-                    missed_fields,
-                } => {
-                    let variant_data = variant.variant_data(db.upcast());
-                    let missed_fields = missed_fields
-                        .into_iter()
-                        .map(|idx| variant_data.fields()[idx].name.clone())
-                        .collect();
-
-                    match record {
-                        Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
-                            Ok(source_ptr) => {
-                                let root = source_ptr.file_syntax(db.upcast());
-                                if let ast::Expr::RecordExpr(record_expr) =
-                                    source_ptr.value.to_node(&root)
-                                {
-                                    if record_expr.record_expr_field_list().is_some() {
-                                        let field_list_parent_path =
-                                            record_expr.path().map(|path| AstPtr::new(&path));
-                                        acc.push(
-                                            MissingFields {
-                                                file: source_ptr.file_id,
-                                                field_list_parent: AstPtr::new(&Either::Left(
-                                                    record_expr,
-                                                )),
-                                                field_list_parent_path,
-                                                missed_fields,
-                                            }
-                                            .into(),
-                                        )
-                                    }
-                                }
-                            }
-                            Err(SyntheticSyntax) => (),
-                        },
-                        Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
-                            Ok(source_ptr) => {
-                                if let Some(ptr) = source_ptr.value.cast::<ast::RecordPat>() {
-                                    let root = source_ptr.file_syntax(db.upcast());
-                                    let record_pat = ptr.to_node(&root);
-                                    if record_pat.record_pat_field_list().is_some() {
-                                        let field_list_parent_path =
-                                            record_pat.path().map(|path| AstPtr::new(&path));
-                                        acc.push(
-                                            MissingFields {
-                                                file: source_ptr.file_id,
-                                                field_list_parent: AstPtr::new(&Either::Right(
-                                                    record_pat,
-                                                )),
-                                                field_list_parent_path,
-                                                missed_fields,
-                                            }
-                                            .into(),
-                                        )
-                                    }
-                                }
-                            }
-                            Err(SyntheticSyntax) => (),
-                        },
-                    }
-                }
-                BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
-                    if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
-                        acc.push(
-                            ReplaceFilterMapNextWithFindMap {
-                                file: next_source_ptr.file_id,
-                                next_expr: next_source_ptr.value,
-                            }
-                            .into(),
-                        );
-                    }
-                }
-                BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
-                    match source_map.expr_syntax(match_expr) {
-                        Ok(source_ptr) => {
-                            let root = source_ptr.file_syntax(db.upcast());
-                            if let ast::Expr::MatchExpr(match_expr) =
-                                &source_ptr.value.to_node(&root)
-                            {
-                                match match_expr.expr() {
-                                    Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
-                                        acc.push(
-                                            MissingMatchArms {
-                                                scrutinee_expr: InFile::new(
-                                                    source_ptr.file_id,
-                                                    AstPtr::new(&scrut_expr),
-                                                ),
-                                                uncovered_patterns,
-                                            }
-                                            .into(),
-                                        );
-                                    }
-                                    _ => {}
-                                }
-                            }
-                        }
-                        Err(SyntheticSyntax) => (),
-                    }
-                }
-            }
+            acc.extend(AnyDiagnostic::body_validation_diagnostic(db, diagnostic, &source_map));
         }
 
         let def: ModuleDef = match self {
@@ -2008,7 +1762,6 @@ impl DefWithBody {
             }
         }
     }
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Function {
     pub(crate) id: FunctionId,
@@ -4299,7 +4052,7 @@ impl Type {
         name: Option<&Name>,
         mut callback: impl FnMut(Function) -> Option<T>,
     ) -> Option<T> {
-        let _p = profile::span("iterate_method_candidates");
+        let _p = tracing::span!(tracing::Level::INFO, "iterate_method_candidates");
         let mut slot = None;
 
         self.iterate_method_candidates_dyn(
@@ -4378,7 +4131,7 @@ impl Type {
         name: Option<&Name>,
         mut callback: impl FnMut(AssocItem) -> Option<T>,
     ) -> Option<T> {
-        let _p = profile::span("iterate_path_candidates");
+        let _p = tracing::span!(tracing::Level::INFO, "iterate_path_candidates");
         let mut slot = None;
         self.iterate_path_candidates_dyn(
             db,
@@ -4444,7 +4197,7 @@ impl Type {
         &'a self,
         db: &'a dyn HirDatabase,
     ) -> impl Iterator<Item = Trait> + 'a {
-        let _p = profile::span("applicable_inherent_traits");
+        let _p = tracing::span!(tracing::Level::INFO, "applicable_inherent_traits");
         self.autoderef_(db)
             .filter_map(|ty| ty.dyn_trait())
             .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
@@ -4452,7 +4205,7 @@ impl Type {
     }
 
     pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
-        let _p = profile::span("env_traits");
+        let _p = tracing::span!(tracing::Level::INFO, "env_traits");
         self.autoderef_(db)
             .filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
             .flat_map(|ty| {
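
Several hunks above replace rust-analyzer's `profile::span(..)` guard with a `tracing` span at INFO level; the `?name` field syntax in `Module::diagnostics` records the value with its `Debug` formatting. A minimal usage sketch, assuming the `tracing` and `tracing-subscriber` crates are available (the subscriber setup is illustrative and not part of this commit):

use tracing::{span, Level};

fn main() {
    // Print INFO-level events and spans to stdout.
    tracing_subscriber::fmt().with_max_level(Level::INFO).init();

    let name = Some("my_module");
    // `?name` captures the field with its Debug representation.
    let _guard = span!(Level::INFO, "Module::diagnostics", ?name).entered();
    tracing::info!("work recorded inside the span");
}
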
@@ -25,6 +25,7 @@ use hir_expand::{
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
+use span::{Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
 use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
@@ -131,6 +132,7 @@ pub struct SemanticsImpl<'db> {
     /// Rootnode to HirFileId cache
     cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
     // These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
+    // So we might wanna move them out into something specific for semantic highlighting
     expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
     /// MacroCall to its expansion's MacroFileId cache
     macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
@@ -607,29 +609,102 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    fn descend_into_macros_impl(
+    // return:
+    // SourceAnalyzer(file_id that original call include!)
+    // macro file id
+    // token in include! macro mapped from token in params
+    // span for the mapped token
+    fn is_from_include_file(
         &self,
         token: SyntaxToken,
+    ) -> Option<(SourceAnalyzer, HirFileId, SyntaxToken, Span)> {
+        let parent = token.parent()?;
+        let file_id = self.find_file(&parent).file_id.file_id()?;
+
+        let mut cache = self.expansion_info_cache.borrow_mut();
+
+        // iterate related crates and find all include! invocations that include_file_id matches
+        for (invoc, _) in self
+            .db
+            .relevant_crates(file_id)
+            .iter()
+            .flat_map(|krate| self.db.include_macro_invoc(*krate))
+            .filter(|&(_, include_file_id)| include_file_id == file_id)
+        {
+            let macro_file = invoc.as_macro_file();
+            let expansion_info = cache
+                .entry(macro_file)
+                .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
+
+            // Create the source analyzer for the macro call scope
+            let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
+            else {
+                continue;
+            };
+            {
+                let InMacroFile { file_id: macro_file, value } = expansion_info.expanded();
+                self.cache(value, macro_file.into());
+            }
+
+            // get mapped token in the include! macro file
+            let span = span::SpanData {
+                range: token.text_range(),
+                anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+                ctx: SyntaxContextId::ROOT,
+            };
+            let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
+                expansion_info.map_range_down(span)
+            else {
+                continue;
+            };
+
+            // if we find one, then return
+            if let Some(t) = mapped_tokens.next() {
+                return Some((sa, file_id.into(), t, span));
+            }
+        }
+
+        None
+    }
+
+    fn descend_into_macros_impl(
+        &self,
+        mut token: SyntaxToken,
         f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
     ) {
-        let _p = profile::span("descend_into_macros");
-        let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
-            Some(it) => it,
-            None => return,
-        };
-
-        let span = match sa.file_id.file_id() {
-            Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
-            None => {
-                stdx::never!();
-                return;
-            }
-        };
+        let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros");
+        let (sa, span, file_id) =
+            match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
+                Some(sa) => match sa.file_id.file_id() {
+                    Some(file_id) => (
+                        sa,
+                        self.db.real_span_map(file_id).span_for_range(token.text_range()),
+                        file_id.into(),
+                    ),
+                    None => {
+                        stdx::never!();
+                        return;
+                    }
+                },
+                None => {
+                    // if we cannot find a source analyzer for this token, then we try to find out
+                    // whether this file is an included file and treat that as the include input
+                    let Some((it, macro_file_id, mapped_token, s)) =
+                        self.is_from_include_file(token)
+                    else {
+                        return;
+                    };
+                    token = mapped_token;
+                    (it, s, macro_file_id)
+                }
+            };
 
         let mut cache = self.expansion_info_cache.borrow_mut();
         let mut mcache = self.macro_call_cache.borrow_mut();
         let def_map = sa.resolver.def_map();
+
+        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
+
         let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
             let expansion_info = cache
                 .entry(macro_file)
@@ -651,8 +726,6 @@ impl<'db> SemanticsImpl<'db> {
             res
         };
 
-        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
-
         while let Some((file_id, mut tokens)) = stack.pop() {
             while let Some(token) = tokens.pop() {
                 let was_not_remapped = (|| {
@@ -1222,7 +1295,7 @@ impl<'db> SemanticsImpl<'db> {
         offset: Option<TextSize>,
         infer_body: bool,
     ) -> Option<SourceAnalyzer> {
-        let _p = profile::span("Semantics::analyze_impl");
+        let _p = tracing::span!(tracing::Level::INFO, "Semantics::analyze_impl");
         let node = self.find_file(node);
 
         let container = self.with_ctx(|ctx| ctx.find_container(node))?;

@@ -117,7 +117,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> {
 
 impl SourceToDefCtx<'_, '_> {
     pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> {
-        let _p = profile::span("SourceBinder::to_module_def");
+        let _p = tracing::span!(tracing::Level::INFO, "SourceBinder::to_module_def");
         let mut mods = SmallVec::new();
         for &crate_id in self.db.relevant_crates(file).iter() {
             // FIXME: inner items
@@ -132,7 +132,7 @@ impl SourceToDefCtx<'_, '_> {
     }
 
     pub(super) fn module_to_def(&self, src: InFile<ast::Module>) -> Option<ModuleId> {
-        let _p = profile::span("module_to_def");
+        let _p = tracing::span!(tracing::Level::INFO, "module_to_def");
         let parent_declaration = src
             .syntax()
             .ancestors_with_macros_skip_attr_item(self.db.upcast())
@@ -153,7 +153,7 @@ impl SourceToDefCtx<'_, '_> {
     }
 
     pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
-        let _p = profile::span("source_file_to_def");
+        let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def");
         let file_id = src.file_id.original_file(self.db.upcast());
         self.file_to_def(file_id).first().copied()
     }
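
The new `is_from_include_file` above handles tokens whose file is only reachable through an `include!` expansion: it walks the relevant crates, finds the `include!` invocation whose included file id matches the token's file, and maps the token through that expansion. The matching step, stripped to plain data (toy `FileId`/`MacroCallId` aliases, not the real types):

type FileId = u32;
type MacroCallId = u32;

// Find the include! invocation that pulled in `file`, mirroring the filter
// over (invocation, included_file_id) pairs in the loop above.
fn find_include_invocation(
    invocations: &[(MacroCallId, FileId)],
    file: FileId,
) -> Option<MacroCallId> {
    invocations.iter().find(|&&(_, included)| included == file).map(|&(invoc, _)| invoc)
}

fn main() {
    let invocs = [(10, 100), (11, 101)];
    assert_eq!(find_include_invocation(&invocs, 101), Some(11));
    assert_eq!(find_include_invocation(&invocs, 102), None);
}
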
@@ -17,6 +17,7 @@ cov-mark = "2.0.0-pre.1"
 itertools.workspace = true
 either.workspace = true
 smallvec.workspace = true
+tracing.workspace = true
 
 # local deps
 stdx.workspace = true
@@ -38,4 +39,4 @@ sourcegen.workspace = true
 in-rust-tree = []
 
 [lints]
 workspace = true
@@ -105,7 +105,7 @@ fn add_missing_impl_members_inner(
     assist_id: &'static str,
     label: &'static str,
 ) -> Option<()> {
-    let _p = profile::span("add_missing_impl_members_inner");
+    let _p = tracing::span!(tracing::Level::INFO, "add_missing_impl_members_inner");
     let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
     let impl_ = ctx.sema.to_def(&impl_def)?;
 
@@ -370,17 +370,17 @@ impl<U> Foo<U> for S {
         add_missing_impl_members,
         r#"
pub trait Trait<'a, 'b, A, B, C> {
-    fn foo(&self, one: &'a A, anoter: &'b B) -> &'a C;
+    fn foo(&self, one: &'a A, another: &'b B) -> &'a C;
}

impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {$0}"#,
         r#"
pub trait Trait<'a, 'b, A, B, C> {
-    fn foo(&self, one: &'a A, anoter: &'b B) -> &'a C;
+    fn foo(&self, one: &'a A, another: &'b B) -> &'a C;
}

impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {
-    fn foo(&self, one: &'x T, anoter: &'y V) -> &'x U {
+    fn foo(&self, one: &'x T, another: &'y V) -> &'x U {
        ${0:todo!()}
    }
}"#,
@@ -393,7 +393,7 @@ impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {
         add_missing_default_members,
         r#"
pub trait Trait<'a, 'b, A, B, C: Default> {
-    fn foo(&self, _one: &'a A, _anoter: &'b B) -> (C, &'a i32) {
+    fn foo(&self, _one: &'a A, _another: &'b B) -> (C, &'a i32) {
        let value: &'a i32 = &0;
        (C::default(), value)
    }
@@ -402,14 +402,14 @@ pub trait Trait<'a, 'b, A, B, C: Default> {
impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () {$0}"#,
         r#"
pub trait Trait<'a, 'b, A, B, C: Default> {
-    fn foo(&self, _one: &'a A, _anoter: &'b B) -> (C, &'a i32) {
+    fn foo(&self, _one: &'a A, _another: &'b B) -> (C, &'a i32) {
        let value: &'a i32 = &0;
        (C::default(), value)
    }
}

impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () {
-    $0fn foo(&self, _one: &'x T, _anoter: &'y V) -> (U, &'x i32) {
+    $0fn foo(&self, _one: &'x T, _another: &'y V) -> (U, &'x i32) {
        let value: &'x i32 = &0;
        (<U>::default(), value)
    }
@@ -163,7 +163,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
                 block
             } else {
                 // `expr_replace` is a descendant of `to_wrap`, so both steps need to be
-                // handled seperately, otherwise we wrap the wrong expression
+                // handled separately, otherwise we wrap the wrong expression
                 let to_wrap = edit.make_mut(to_wrap);
 
                 // Replace the target expr first so that we don't need to find where
@@ -418,7 +418,7 @@ where
     }
 
     #[test]
-    fn new_function_with_generics_and_wheres() {
+    fn new_function_with_generics_and_where() {
         check_assist(
             generate_default_from_new,
             r#"
@@ -295,7 +295,7 @@ fn generate_impl(
     // those in strukt.
     //
     // These generics parameters will also be used in `field_ty` and
-    // `where_clauses`, so we should substitude arguments in them as well.
+    // `where_clauses`, so we should substitute arguments in them as well.
     let strukt_params = resolve_name_conflicts(strukt_params, &old_impl_params);
     let (field_ty, ty_where_clause) = match &strukt_params {
         Some(strukt_params) => {
@@ -491,7 +491,7 @@ fn remove_useless_where_clauses(trait_ty: &ast::Type, self_ty: &ast::Type, wc: a
 
 // Generate generic args that should be apply to current impl.
 //
-// For exmaple, say we have implementation `impl<A, B, C> Trait for B<A>`,
+// For example, say we have implementation `impl<A, B, C> Trait for B<A>`,
 // and `b: B<T>` in struct `S<T>`. Then the `A` should be instantiated to `T`.
 // While the last two generic args `B` and `C` doesn't change, it remains
 // `<B, C>`. So we apply `<T, B, C>` as generic arguments to impl.
@@ -637,7 +637,7 @@ fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option
     let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
 
     // We want rhs of the const assignment to be a qualified path
-    // The general case for const assigment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`)
+    // The general case for const assignment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`)
     // The qualified will have the following generic syntax :
     // <Base as Trait<GenArgs>>::ConstName;
     // FIXME : We can't rely on `make::path_qualified` for now but it would be nice to replace the following with it.
@@ -779,7 +779,7 @@ impl Trait for Base {}
 
     #[test]
     fn test_self_ty() {
-        // trait whith `Self` type cannot be delegated
+        // trait with `Self` type cannot be delegated
         //
         // See the function `fn f() -> Self`.
         // It should be `fn f() -> Base` in `Base`, and `fn f() -> S` in `S`
@@ -6,7 +6,7 @@ use syntax::{
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
 
-// FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredicable case [#15581].
+// FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredictable case [#15581].
 // Here just leave the `index_mut` method body be same as `index` method body, user can modify it manually to meet their need.
 
 // Assist: generate_mut_trait_impl
@@ -183,7 +183,7 @@ fn remove_items_visibility(item: &ast::AssocItem) {
 fn strip_body(item: &ast::AssocItem) {
     if let ast::AssocItem::Fn(f) = item {
         if let Some(body) = f.body() {
-            // In constrast to function bodies, we want to see no ws before a semicolon.
+            // In contrast to function bodies, we want to see no ws before a semicolon.
             // So let's remove them if we see any.
             if let Some(prev) = body.syntax().prev_sibling_or_token() {
                 if prev.kind() == SyntaxKind::WHITESPACE {
@@ -120,7 +120,7 @@ fn main() -> () {
 }
 
 #[test]
-fn fromed_in_child_mod_imported() {
+fn from_in_child_mod_imported() {
     check_assist(
         into_to_qualified_from,
         r#"
@@ -168,7 +168,7 @@ fn main() -> () {
 }
 
 #[test]
-fn fromed_in_child_mod_not_imported() {
+fn from_in_child_mod_not_imported() {
     check_assist(
         into_to_qualified_from,
         r#"
@@ -1,8 +1,9 @@
 use either::Either;
 use ide_db::imports::{
     insert_use::{ImportGranularity, InsertUseConfig},
-    merge_imports::{try_merge_imports, try_merge_trees, MergeBehavior},
+    merge_imports::{try_merge_imports, try_merge_trees, try_normalize_use_tree, MergeBehavior},
 };
+use itertools::Itertools;
 use syntax::{
     algo::neighbor,
     ast::{self, edit_in_place::Removable},
@@ -32,24 +33,13 @@ use Edit::*;
 pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let (target, edits) = if ctx.has_empty_selection() {
         // Merge a neighbor
-        let mut tree: ast::UseTree = ctx.find_node_at_offset()?;
-        if ctx.config.insert_use.granularity == ImportGranularity::One
-            && tree.parent_use_tree_list().is_some()
-        {
-            cov_mark::hit!(resolve_top_use_tree_for_import_one);
-            tree = tree.top_use_tree();
-        }
+        cov_mark::hit!(merge_with_use_item_neighbors);
+        let tree = ctx.find_node_at_offset::<ast::UseTree>()?.top_use_tree();
         let target = tree.syntax().text_range();

-        let edits = if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) {
-            cov_mark::hit!(merge_with_use_item_neighbors);
-            let mut neighbor = next_prev().find_map(|dir| neighbor(&use_item, dir)).into_iter();
-            use_item.try_merge_from(&mut neighbor, &ctx.config.insert_use)
-        } else {
-            cov_mark::hit!(merge_with_use_tree_neighbors);
-            let mut neighbor = next_prev().find_map(|dir| neighbor(&tree, dir)).into_iter();
-            tree.clone().try_merge_from(&mut neighbor, &ctx.config.insert_use)
-        };
+        let use_item = tree.syntax().parent().and_then(ast::Use::cast)?;
+        let mut neighbor = next_prev().find_map(|dir| neighbor(&use_item, dir)).into_iter();
+        let edits = use_item.try_merge_from(&mut neighbor, &ctx.config.insert_use);
         (target, edits?)
     } else {
         // Merge selected
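The rewritten branch above now always resolves the top-level use tree and merges whole use items with their neighbors. A hypothetical test in the style of the ones later in this file, showing that path:

```rust
#[test]
fn merges_with_use_item_neighbor_sketch() {
    // Illustrative only: the use item under the cursor ($0) is merged with
    // its neighboring use item into a single tree.
    check_assist(
        merge_imports,
        r"
use std::fmt$0::Display;
use std::fmt::Debug;
",
        r"
use std::fmt::{Debug, Display};
",
    );
}
```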
@@ -94,7 +84,35 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
             for edit in edits_mut {
                 match edit {
                     Remove(it) => it.as_ref().either(Removable::remove, Removable::remove),
-                    Replace(old, new) => ted::replace(old, new),
+                    Replace(old, new) => {
+                        ted::replace(old, &new);
+
+                        // If there's a selection and we're replacing a use tree in a tree list,
+                        // normalize the parent use tree if it only contains the merged subtree.
+                        if !ctx.has_empty_selection() {
+                            let normalized_use_tree = ast::UseTree::cast(new)
+                                .as_ref()
+                                .and_then(ast::UseTree::parent_use_tree_list)
+                                .and_then(|use_tree_list| {
+                                    if use_tree_list.use_trees().collect_tuple::<(_,)>().is_some() {
+                                        Some(use_tree_list.parent_use_tree())
+                                    } else {
+                                        None
+                                    }
+                                })
+                                .and_then(|target_tree| {
+                                    try_normalize_use_tree(
+                                        &target_tree,
+                                        ctx.config.insert_use.granularity.into(),
+                                    )
+                                    .map(|top_use_tree_flat| (target_tree, top_use_tree_flat))
+                                });
+                            if let Some((old_tree, new_tree)) = normalized_use_tree {
+                                cov_mark::hit!(replace_parent_with_normalized_use_tree);
+                                ted::replace(old_tree.syntax(), new_tree.syntax());
+                            }
+                        }
+                    }
                 }
             }
         },
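The `collect_tuple::<(_,)>()` call above is itertools' idiom for "exactly one element". A minimal standalone sketch of the idiom (assumes the `itertools` crate, which this file now imports):

```rust
use itertools::Itertools;

// Returns Some(item) only when the iterator yields exactly one item;
// zero items or more than one item yield None.
fn exactly_one<T>(iter: impl Iterator<Item = T>) -> Option<T> {
    iter.collect_tuple::<(_,)>().map(|(item,)| item)
}

fn main() {
    assert_eq!(exactly_one([1].into_iter()), Some(1));
    assert_eq!(exactly_one([1, 2].into_iter()), None);
    assert_eq!(exactly_one(std::iter::empty::<i32>()), None);
}
```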
@@ -201,20 +219,17 @@ use std::fmt$0::{Display, Debug};
 use std::fmt::{Display, Debug};
 ",
             r"
-use std::fmt::{Display, Debug};
+use std::fmt::{Debug, Display};
 ",
         );

         // The assist macro below calls `check_assist_import_one` 4 times with different input
-        // use item variations based on the first 2 input parameters, but only 2 calls
-        // contain `use {std::fmt$0::{Display, Debug}};` for which the top use tree will need
-        // to be resolved.
-        cov_mark::check_count!(resolve_top_use_tree_for_import_one, 2);
+        // use item variations based on the first 2 input parameters.
         cov_mark::check_count!(merge_with_use_item_neighbors, 4);
         check_assist_import_one_variations!(
             "std::fmt$0::{Display, Debug}",
             "std::fmt::{Display, Debug}",
-            "use {std::fmt::{Display, Debug}};"
+            "use {std::fmt::{Debug, Display}};"
         );
     }

@@ -257,7 +272,7 @@ use std::fmt::{Debug, Display};
     }

     #[test]
-    fn merge_self1() {
+    fn merge_self() {
         check_assist(
             merge_imports,
             r"
@@ -276,21 +291,8 @@ use std::fmt::{self, Display};
     }

     #[test]
-    fn merge_self2() {
-        check_assist(
-            merge_imports,
-            r"
-use std::{fmt, $0fmt::Display};
-",
-            r"
-use std::{fmt::{self, Display}};
-",
-        );
-    }
-
-    #[test]
-    fn not_applicable_to_single_one_style_import() {
-        cov_mark::check!(resolve_top_use_tree_for_import_one);
+    fn not_applicable_to_single_import() {
+        check_assist_not_applicable(merge_imports, "use std::{fmt, $0fmt::Display};");
         check_assist_not_applicable_for_import_one(
             merge_imports,
             "use {std::{fmt, $0fmt::Display}};",
@@ -385,14 +387,14 @@ pub(in this::path) use std::fmt::{Debug, Display};

     #[test]
     fn test_merge_nested() {
-        cov_mark::check!(merge_with_use_tree_neighbors);
         check_assist(
             merge_imports,
             r"
-use std::{fmt$0::Debug, fmt::Display};
+use std::{fmt$0::Debug, fmt::Error};
+use std::{fmt::Write, fmt::Display};
 ",
             r"
-use std::{fmt::{Debug, Display}};
+use std::fmt::{Debug, Display, Error, Write};
 ",
         );
     }
@@ -402,10 +404,11 @@ use std::{fmt::{Debug, Display}};
         check_assist(
             merge_imports,
             r"
-use std::{fmt::Debug, fmt$0::Display};
+use std::{fmt::Debug, fmt$0::Error};
+use std::{fmt::Write, fmt::Display};
 ",
             r"
-use std::{fmt::{Debug, Display}};
+use std::fmt::{Debug, Display, Error, Write};
 ",
         );
     }
@@ -419,13 +422,13 @@ use std$0::{fmt::{Write, Display}};
 use std::{fmt::{self, Debug}};
 ",
             r"
-use std::{fmt::{self, Debug, Display, Write}};
+use std::fmt::{self, Debug, Display, Write};
 ",
         );
         check_assist_import_one_variations!(
             "std$0::{fmt::{Write, Display}}",
             "std::{fmt::{self, Debug}}",
-            "use {std::{fmt::{self, Debug, Display, Write}}};"
+            "use {std::fmt::{self, Debug, Display, Write}};"
         );
     }

@@ -438,26 +441,13 @@ use std$0::{fmt::{self, Debug}};
 use std::{fmt::{Write, Display}};
 ",
             r"
-use std::{fmt::{self, Debug, Display, Write}};
+use std::fmt::{self, Debug, Display, Write};
 ",
         );
         check_assist_import_one_variations!(
             "std$0::{fmt::{self, Debug}}",
             "std::{fmt::{Write, Display}}",
-            "use {std::{fmt::{self, Debug, Display, Write}}};"
-        );
-    }
-
-    #[test]
-    fn test_merge_self_with_nested_self_item() {
-        check_assist(
-            merge_imports,
-            r"
-use std::{fmt$0::{self, Debug}, fmt::{Write, Display}};
-",
-            r"
-use std::{fmt::{self, Debug, Display, Write}};
-",
+            "use {std::fmt::{self, Debug, Display, Write}};"
         );
     }

@@ -470,13 +460,13 @@ use foo::$0{bar::{self}};
 use foo::{bar};
 ",
             r"
-use foo::{bar::{self}};
+use foo::bar;
 ",
         );
         check_assist_import_one_variations!(
             "foo::$0{bar::{self}}",
             "foo::{bar}",
-            "use {foo::{bar::{self}}};"
+            "use {foo::bar};"
         );
     }

@@ -489,13 +479,13 @@ use foo::$0{bar};
 use foo::{bar::{self}};
 ",
             r"
-use foo::{bar::{self}};
+use foo::bar;
 ",
         );
         check_assist_import_one_variations!(
             "foo::$0{bar}",
             "foo::{bar::{self}}",
-            "use {foo::{bar::{self}}};"
+            "use {foo::bar};"
         );
     }

@@ -508,13 +498,13 @@ use std$0::{fmt::*};
 use std::{fmt::{self, Display}};
 ",
             r"
-use std::{fmt::{self, Display, *}};
+use std::fmt::{self, Display, *};
 ",
         );
         check_assist_import_one_variations!(
             "std$0::{fmt::*}",
             "std::{fmt::{self, Display}}",
-            "use {std::{fmt::{self, Display, *}}};"
+            "use {std::fmt::{self, Display, *}};"
         );
     }

@@ -579,29 +569,27 @@ use foo::{bar, baz};
         check_assist(
             merge_imports,
             r"
-use {
-    foo$0::bar,
-    foo::baz,
+use foo$0::{
+    bar, baz,
 };
+use foo::qux;
 ",
             r"
-use {
-    foo::{bar, baz},
+use foo::{
+    bar, baz, qux,
 };
 ",
         );
         check_assist(
             merge_imports,
             r"
-use {
-    foo::baz,
-    foo$0::bar,
+use foo::{
+    baz, bar,
 };
+use foo$0::qux;
 ",
             r"
-use {
-    foo::{bar, baz},
-};
+use foo::{bar, baz, qux};
 ",
         );
     }
@@ -711,12 +699,19 @@ use std::{
 };",
         );

-        // FIXME: Remove redundant braces. See also unnecessary-braces diagnostic.
         cov_mark::check!(merge_with_selected_use_tree_neighbors);
+        check_assist(
+            merge_imports,
+            r"use std::{fmt::Result, $0fmt::Display, fmt::Debug$0};",
+            r"use std::{fmt::Result, fmt::{Debug, Display}};",
+        );
+
+        cov_mark::check!(merge_with_selected_use_tree_neighbors);
+        cov_mark::check!(replace_parent_with_normalized_use_tree);
         check_assist(
             merge_imports,
             r"use std::$0{fmt::Display, fmt::Debug}$0;",
-            r"use std::{fmt::{Debug, Display}};",
+            r"use std::fmt::{Debug, Display};",
         );
     }
 }
219
crates/ide-assists/src/handlers/normalize_import.rs
Normal file
@@ -0,0 +1,219 @@
+use ide_db::imports::merge_imports::try_normalize_import;
+use syntax::{ast, AstNode};
+
+use crate::{
+    assist_context::{AssistContext, Assists},
+    AssistId, AssistKind,
+};
+
+// Assist: normalize_import
+//
+// Normalizes an import.
+//
+// ```
+// use$0 std::{io, {fmt::Formatter}};
+// ```
+// ->
+// ```
+// use std::{fmt::Formatter, io};
+// ```
+pub(crate) fn normalize_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let use_item = if ctx.has_empty_selection() {
+        ctx.find_node_at_offset()?
+    } else {
+        ctx.covering_element().ancestors().find_map(ast::Use::cast)?
+    };
+
+    let target = use_item.syntax().text_range();
+    let normalized_use_item =
+        try_normalize_import(&use_item, ctx.config.insert_use.granularity.into())?;
+
+    acc.add(
+        AssistId("normalize_import", AssistKind::RefactorRewrite),
+        "Normalize import",
+        target,
+        |builder| {
+            builder.replace_ast(use_item, normalized_use_item);
+        },
+    )
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::{
+        check_assist, check_assist_import_one, check_assist_not_applicable,
+        check_assist_not_applicable_for_import_one,
+    };
+
+    use super::*;
+
+    macro_rules! check_assist_variations {
+        ($fixture: literal, $expected: literal) => {
+            check_assist(
+                normalize_import,
+                concat!("use $0", $fixture, ";"),
+                concat!("use ", $expected, ";"),
+            );
+            check_assist(
+                normalize_import,
+                concat!("$0use ", $fixture, ";"),
+                concat!("use ", $expected, ";"),
+            );
+
+            check_assist_import_one(
+                normalize_import,
+                concat!("use $0", $fixture, ";"),
+                concat!("use {", $expected, "};"),
+            );
+            check_assist_import_one(
+                normalize_import,
+                concat!("$0use ", $fixture, ";"),
+                concat!("use {", $expected, "};"),
+            );
+
+            check_assist_import_one(
+                normalize_import,
+                concat!("use $0{", $fixture, "};"),
+                concat!("use {", $expected, "};"),
+            );
+            check_assist_import_one(
+                normalize_import,
+                concat!("$0use {", $fixture, "};"),
+                concat!("use {", $expected, "};"),
+            );
+
+            check_assist(
+                normalize_import,
+                concat!("use $0", $fixture, "$0;"),
+                concat!("use ", $expected, ";"),
+            );
+            check_assist(
+                normalize_import,
+                concat!("$0use ", $fixture, ";$0"),
+                concat!("use ", $expected, ";"),
+            );
+        };
+    }
+
+    macro_rules! check_assist_not_applicable_variations {
+        ($fixture: literal) => {
+            check_assist_not_applicable(normalize_import, concat!("use $0", $fixture, ";"));
+            check_assist_not_applicable(normalize_import, concat!("$0use ", $fixture, ";"));
+
+            check_assist_not_applicable_for_import_one(
+                normalize_import,
+                concat!("use $0{", $fixture, "};"),
+            );
+            check_assist_not_applicable_for_import_one(
+                normalize_import,
+                concat!("$0use {", $fixture, "};"),
+            );
+        };
+    }
+
+    #[test]
+    fn test_order() {
+        check_assist_variations!(
+            "foo::{*, Qux, bar::{Quux, Bar}, baz, FOO_BAZ, self, Baz}",
+            "foo::{self, bar::{Bar, Quux}, baz, Baz, Qux, FOO_BAZ, *}"
+        );
+    }
+
+    #[test]
+    fn test_redundant_braces() {
+        check_assist_variations!("foo::{bar::{baz, Qux}}", "foo::bar::{baz, Qux}");
+        check_assist_variations!("foo::{bar::{self}}", "foo::bar");
+        check_assist_variations!("foo::{bar::{*}}", "foo::bar::*");
+        check_assist_variations!("foo::{bar::{Qux as Quux}}", "foo::bar::Qux as Quux");
+        check_assist_variations!(
+            "foo::bar::{{FOO_BAZ, Qux, self}, {*, baz}}",
+            "foo::bar::{self, baz, Qux, FOO_BAZ, *}"
+        );
+        check_assist_variations!(
+            "foo::bar::{{{FOO_BAZ}, {{Qux}, {self}}}, {{*}, {baz}}}",
+            "foo::bar::{self, baz, Qux, FOO_BAZ, *}"
+        );
+    }
+
+    #[test]
+    fn test_merge() {
+        check_assist_variations!(
+            "foo::{*, bar, {FOO_BAZ, qux}, bar::{*, baz}, {Quux}}",
+            "foo::{bar::{self, baz, *}, qux, Quux, FOO_BAZ, *}"
+        );
+        check_assist_variations!(
+            "foo::{*, bar, {FOO_BAZ, qux}, bar::{*, baz}, {Quux, bar::{baz::Foo}}}",
+            "foo::{bar::{self, baz::{self, Foo}, *}, qux, Quux, FOO_BAZ, *}"
+        );
+    }
+
+    #[test]
+    fn test_merge_self() {
+        check_assist_variations!("std::{fmt, fmt::Display}", "std::fmt::{self, Display}");
+    }
+
+    #[test]
+    fn test_merge_nested() {
+        check_assist_variations!("std::{fmt::Debug, fmt::Display}", "std::fmt::{Debug, Display}");
+    }
+
+    #[test]
+    fn test_merge_nested2() {
+        check_assist_variations!("std::{fmt::Debug, fmt::Display}", "std::fmt::{Debug, Display}");
+    }
+
+    #[test]
+    fn test_merge_self_with_nested_self_item() {
+        check_assist_variations!(
+            "std::{fmt::{self, Debug}, fmt::{Write, Display}}",
+            "std::fmt::{self, Debug, Display, Write}"
+        );
+    }
+
+    #[test]
+    fn works_with_trailing_comma() {
+        check_assist(
+            normalize_import,
+            r"
+use $0{
+    foo::bar,
+    foo::baz,
+};
+",
+            r"
+use foo::{bar, baz};
+",
+        );
+        check_assist_import_one(
+            normalize_import,
+            r"
+use $0{
+    foo::bar,
+    foo::baz,
+};
+",
+            r"
+use {
+    foo::{bar, baz},
+};
+",
+        );
+    }
+
+    #[test]
+    fn not_applicable_to_normalized_import() {
+        check_assist_not_applicable_variations!("foo::bar");
+        check_assist_not_applicable_variations!("foo::bar::*");
+        check_assist_not_applicable_variations!("foo::bar::Qux as Quux");
+        check_assist_not_applicable_variations!("foo::bar::{self, baz, Qux, FOO_BAZ, *}");
+        check_assist_not_applicable_variations!(
+            "foo::{self, bar::{Bar, Quux}, baz, Baz, Qux, FOO_BAZ, *}"
+        );
+        check_assist_not_applicable_variations!(
+            "foo::{bar::{self, baz, *}, qux, Quux, FOO_BAZ, *}"
+        );
+        check_assist_not_applicable_variations!(
+            "foo::{bar::{self, baz::{self, Foo}, *}, qux, Quux, FOO_BAZ, *}"
+        );
+    }
+}
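One more illustrative case in the spirit of the new tests above, matching the example in the assist's doc comment (hypothetical; not part of the committed test suite):

```rust
#[test]
fn doc_comment_example_sketch() {
    // Mirrors the `// Assist: normalize_import` doc example: nested braces
    // are flattened and the remaining trees are sorted.
    check_assist(
        normalize_import,
        r"use$0 std::{io, {fmt::Formatter}};",
        r"use std::{fmt::Formatter, io};",
    );
}
```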
@@ -116,11 +116,9 @@ trait AddRewrite {
         new: Vec<T>,
         target: TextRange,
     ) -> Option<()>;
-    fn yeet() {}
 }

 impl AddRewrite for Assists {
-    fn yeet() {}
     fn add_rewrite<T: AstNode>(
         &mut self,
         label: &str,
@@ -111,6 +111,8 @@ mod handlers {
     mod add_label_to_loop;
     mod add_lifetime_to_type;
     mod add_missing_impl_members;
+    mod add_missing_match_arms;
+    mod add_return_type;
     mod add_turbo_fish;
     mod apply_demorgan;
     mod auto_import;
@@ -124,15 +126,15 @@ mod handlers {
     mod convert_iter_for_each_to_for;
     mod convert_let_else_to_match;
     mod convert_match_to_let_else;
+    mod convert_named_struct_to_tuple_struct;
     mod convert_nested_function_to_closure;
+    mod convert_to_guarded_return;
     mod convert_tuple_return_type_to_struct;
     mod convert_tuple_struct_to_named_struct;
-    mod convert_named_struct_to_tuple_struct;
-    mod convert_to_guarded_return;
     mod convert_two_arm_bool_match_to_matches_macro;
     mod convert_while_to_loop;
-    mod desugar_doc_comment;
     mod destructure_tuple_binding;
+    mod desugar_doc_comment;
     mod expand_glob_import;
     mod extract_expressions_from_format_string;
     mod extract_function;
@@ -140,7 +142,6 @@ mod handlers {
     mod extract_struct_from_enum_variant;
     mod extract_type_alias;
     mod extract_variable;
-    mod add_missing_match_arms;
     mod fix_visibility;
     mod flip_binexpr;
     mod flip_comma;
@@ -148,6 +149,7 @@ mod handlers {
     mod generate_constant;
     mod generate_default_from_enum_variant;
     mod generate_default_from_new;
+    mod generate_delegate_methods;
     mod generate_delegate_trait;
     mod generate_deref;
     mod generate_derive;
@@ -162,62 +164,61 @@ mod handlers {
     mod generate_is_empty_from_len;
     mod generate_mut_trait_impl;
     mod generate_new;
-    mod generate_delegate_methods;
     mod generate_trait_from_impl;
-    mod add_return_type;
     mod inline_call;
     mod inline_const_as_literal;
     mod inline_local_variable;
     mod inline_macro;
     mod inline_type_alias;
+    mod into_to_qualified_from;
+    mod introduce_named_generic;
     mod introduce_named_lifetime;
     mod invert_if;
     mod merge_imports;
     mod merge_match_arms;
+    mod merge_nested_if;
     mod move_bounds;
     mod move_const_to_impl;
+    mod move_from_mod_rs;
     mod move_guard;
     mod move_module_to_file;
     mod move_to_mod_rs;
-    mod move_from_mod_rs;
+    mod normalize_import;
     mod number_representation;
     mod promote_local_to_const;
     mod pull_assignment_up;
-    mod qualify_path;
     mod qualify_method_call;
+    mod qualify_path;
     mod raw_string;
     mod remove_dbg;
     mod remove_mut;
+    mod remove_parentheses;
     mod remove_unused_imports;
     mod remove_unused_param;
-    mod remove_parentheses;
     mod reorder_fields;
     mod reorder_impl_items;
-    mod replace_try_expr_with_match;
+    mod replace_arith_op;
     mod replace_derive_with_manual_impl;
     mod replace_if_let_with_match;
     mod replace_is_method_with_if_let_method;
-    mod replace_method_eager_lazy;
-    mod replace_arith_op;
-    mod introduce_named_generic;
     mod replace_let_with_if_let;
+    mod replace_method_eager_lazy;
     mod replace_named_generic_with_impl;
     mod replace_qualified_name_with_use;
     mod replace_string_with_char;
+    mod replace_try_expr_with_match;
     mod replace_turbofish_with_explicit_type;
-    mod split_import;
-    mod unmerge_match_arm;
-    mod unwrap_tuple;
     mod sort_items;
+    mod split_import;
     mod toggle_ignore;
+    mod unmerge_match_arm;
     mod unmerge_use;
     mod unnecessary_async;
+    mod unqualify_method_call;
     mod unwrap_block;
     mod unwrap_result_return_type;
-    mod unqualify_method_call;
+    mod unwrap_tuple;
     mod wrap_return_type_in_result;
-    mod into_to_qualified_from;
-    mod merge_nested_if;

     pub(crate) fn all() -> &'static [Handler] {
         &[
@@ -300,6 +301,7 @@ mod handlers {
             move_module_to_file::move_module_to_file,
             move_to_mod_rs::move_to_mod_rs,
             move_from_mod_rs::move_from_mod_rs,
+            normalize_import::normalize_import,
             number_representation::reformat_number_literal,
             pull_assignment_up::pull_assignment_up,
             promote_local_to_const::promote_local_to_const,
@@ -2217,6 +2217,19 @@ fn t() {}
     )
 }

+#[test]
+fn doctest_normalize_import() {
+    check_doc_test(
+        "normalize_import",
+        r#####"
+use$0 std::{io, {fmt::Formatter}};
+"#####,
+        r#####"
+use std::{fmt::Formatter, io};
+"#####,
+    )
+}
+
 #[test]
 fn doctest_promote_local_to_const() {
     check_doc_test(
Some files were not shown because too many files have changed in this diff.