Mirror of https://github.com/rust-lang/rust-analyzer (synced 2025-01-09 03:38:47 +00:00)

Auto merge of #3755 - rust-lang:rustup-2024-07-20, r=RalfJung

Automatic Rustup

Commit: 29cb173b47
275 changed files with 7403 additions and 4464 deletions
22  .github/workflows/release.yaml (vendored)

@@ -132,7 +132,7 @@ jobs:
       run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std

     - name: Upload artifacts
-      uses: actions/upload-artifact@v1
+      uses: actions/upload-artifact@v4
       with:
         name: dist-${{ matrix.target }}
         path: ./dist
@@ -177,7 +177,7 @@ jobs:
     - run: rm -rf editors/code/server

     - name: Upload artifacts
-      uses: actions/upload-artifact@v1
+      uses: actions/upload-artifact@v4
       with:
         name: dist-x86_64-unknown-linux-musl
         path: ./dist
@@ -206,39 +206,39 @@ jobs:
     - run: echo "HEAD_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
     - run: 'echo "HEAD_SHA: $HEAD_SHA"'

-    - uses: actions/download-artifact@v1
+    - uses: actions/download-artifact@v4
      with:
        name: dist-aarch64-apple-darwin
        path: dist
-    - uses: actions/download-artifact@v1
+    - uses: actions/download-artifact@v4
      with:
        name: dist-x86_64-apple-darwin
        path: dist
-    - uses: actions/download-artifact@v1
+    - uses: actions/download-artifact@v4
      with:
        name: dist-x86_64-unknown-linux-gnu
        path: dist
-    - uses: actions/download-artifact@v1
+    - uses: actions/download-artifact@v4
      with:
        name: dist-x86_64-unknown-linux-musl
        path: dist
-    - uses: actions/download-artifact@v1
+    - uses: actions/download-artifact@v4
      with:
        name: dist-aarch64-unknown-linux-gnu
        path: dist
-    - uses: actions/download-artifact@v1
+    - uses: actions/download-artifact@v4
      with:
        name: dist-arm-unknown-linux-gnueabihf
        path: dist
-    - uses: actions/download-artifact@v1
+    - uses: actions/download-artifact@v4
      with:
        name: dist-x86_64-pc-windows-msvc
        path: dist
-    - uses: actions/download-artifact@v1
+    - uses: actions/download-artifact@v4
      with:
        name: dist-i686-pc-windows-msvc
        path: dist
-    - uses: actions/download-artifact@v1
+    - uses: actions/download-artifact@v4
      with:
        name: dist-aarch64-pc-windows-msvc
        path: dist
85  Cargo.lock (generated)

@@ -167,9 +167,9 @@ checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"

 [[package]]
 name = "chalk-derive"
-version = "0.97.0"
+version = "0.98.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "92a0aedc4ac2adc5c0b7dc9ec38c5c816284ad28da6d4ecd01873b9683f54972"
+checksum = "9426c8fd0fe61c3da880b801d3b510524df17843a8f9ec1f5b9cec24fb7412df"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -179,9 +179,9 @@ dependencies = [

 [[package]]
 name = "chalk-ir"
-version = "0.97.0"
+version = "0.98.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db18493569b190f7266a04901e520fc3a5c00564475154287906f8a27302c119"
+checksum = "d5f2eb1cd6054da221bd1ac0197fb2fe5e2caf3dcb93619398fc1433f8f09093"
 dependencies = [
  "bitflags 2.5.0",
  "chalk-derive",
@@ -189,9 +189,9 @@ dependencies = [

 [[package]]
 name = "chalk-recursive"
-version = "0.97.0"
+version = "0.98.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae4ba8ce5bd2e1b59f1f79495bc8704db09a8285e51cc5ddf01d9baee1bf447d"
+checksum = "129dc03458f71cfb9c3cd621c9c68166a94e87b85b16ccd29af015d7ff9a1c61"
 dependencies = [
  "chalk-derive",
  "chalk-ir",
@@ -202,9 +202,9 @@ dependencies = [

 [[package]]
 name = "chalk-solve"
-version = "0.97.0"
+version = "0.98.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2ec1b3b7f7b1ec38f099ef39c2bc3ea29335be1b8316d114baff46d96d131e9"
+checksum = "d7e8a8c1e928f98cdf227b868416ef21dcd8cc3c61b347576d783713444d41c8"
 dependencies = [
  "chalk-derive",
  "chalk-ir",
@@ -221,11 +221,6 @@ name = "countme"
 version = "3.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
-dependencies = [
- "dashmap",
- "once_cell",
- "rustc-hash",
-]

 [[package]]
 name = "cov-mark"
@@ -548,10 +543,10 @@ dependencies = [
  "limit",
  "mbe",
  "once_cell",
- "profile",
  "ra-ap-rustc_abi",
  "ra-ap-rustc_parse_format",
  "rustc-hash",
+ "rustc_apfloat",
  "smallvec",
  "span",
  "stdx",
@@ -616,9 +611,10 @@ dependencies = [
  "oorandom",
  "project-model",
  "ra-ap-rustc_abi",
- "ra-ap-rustc_index 0.53.0",
+ "ra-ap-rustc_index",
  "ra-ap-rustc_pattern_analysis",
  "rustc-hash",
+ "rustc_apfloat",
  "scoped-tls",
  "smallvec",
  "span",
@@ -664,6 +660,7 @@ dependencies = [
  "profile",
  "pulldown-cmark",
  "pulldown-cmark-to-cmark",
+ "rustc_apfloat",
  "smallvec",
  "span",
  "stdx",
@@ -809,7 +806,6 @@ checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
 dependencies = [
  "equivalent",
  "hashbrown",
- "serde",
 ]

 [[package]]
@@ -1046,6 +1042,7 @@ checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5"
 name = "mbe"
 version = "0.0.0"
 dependencies = [
+ "arrayvec",
  "cov-mark",
  "parser",
  "rustc-hash",
@@ -1250,7 +1247,6 @@ dependencies = [
  "expect-test",
  "limit",
  "ra-ap-rustc_lexer",
- "sourcegen",
  "stdx",
  "tracing",
 ]
@@ -1328,18 +1324,14 @@ dependencies = [
  "base-db",
  "indexmap",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "memmap2",
- "object 0.33.0",
  "paths",
  "rustc-hash",
  "serde",
  "serde_json",
- "snap",
  "span",
  "stdx",
  "text-size",
  "tracing",
- "triomphe",
  "tt",
 ]

@@ -1357,6 +1349,7 @@ dependencies = [
  "proc-macro-api",
  "proc-macro-test",
  "ra-ap-rustc_lexer",
+ "snap",
  "span",
  "stdx",
  "tt",
@@ -1403,13 +1396,9 @@ name = "profile"
 version = "0.0.0"
 dependencies = [
  "cfg-if",
- "countme",
- "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc",
- "once_cell",
  "perf-event",
  "tikv-jemalloc-ctl",
- "tracing",
  "windows-sys 0.52.0",
 ]

@@ -1492,21 +1481,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "80b1d613eee933486c0613a7bc26e515e46f43adf479d1edd5e537f983e9ce46"
 dependencies = [
  "bitflags 2.5.0",
- "ra-ap-rustc_index 0.53.0",
+ "ra-ap-rustc_index",
  "tracing",
 ]

-[[package]]
-name = "ra-ap-rustc_index"
-version = "0.44.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ad68bacffb87dcdbb23a3ce11261375078aaa06b85d348c49f39ffd5510dc20"
-dependencies = [
- "arrayvec",
- "ra-ap-rustc_index_macros 0.44.0",
- "smallvec",
-]
-
 [[package]]
 name = "ra-ap-rustc_index"
 version = "0.53.0"
@@ -1514,22 +1492,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f072060ac77e9e1a02cc20028095993af7e72cc0804779c68bcbf47b16de49c9"
 dependencies = [
  "arrayvec",
- "ra-ap-rustc_index_macros 0.53.0",
+ "ra-ap-rustc_index_macros",
  "smallvec",
 ]

-[[package]]
-name = "ra-ap-rustc_index_macros"
-version = "0.44.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8782aaf3a113837c533dfb1c45df91cd17e1fdd1d2f9a20c2e0d1976025c4f1f"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- "synstructure",
-]
-
 [[package]]
 name = "ra-ap-rustc_index_macros"
 version = "0.53.0"
@@ -1558,17 +1524,17 @@ version = "0.53.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "70dad7a491c2554590222e0c9212dcb7c2e7aceb668875075012a35ea780d135"
 dependencies = [
- "ra-ap-rustc_index 0.53.0",
+ "ra-ap-rustc_index",
  "ra-ap-rustc_lexer",
 ]

 [[package]]
 name = "ra-ap-rustc_pattern_analysis"
-version = "0.44.0"
+version = "0.53.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d63d1e1d5b2a13273cee1a10011147418f40e12b70f70578ce1dee0f1cafc334"
+checksum = "34768e1faf88c31f2e9ad57b48318a52b507dafac0cddbf01b5d63bfc0b0a365"
 dependencies = [
- "ra-ap-rustc_index 0.44.0",
+ "ra-ap-rustc_index",
  "rustc-hash",
  "rustc_apfloat",
  "smallvec",
@@ -1685,7 +1651,6 @@ dependencies = [
  "ide",
  "ide-db",
  "ide-ssr",
- "indexmap",
  "itertools",
  "load-cargo",
  "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1708,7 +1673,6 @@ dependencies = [
  "semver",
  "serde",
  "serde_json",
- "sourcegen",
  "stdx",
  "syntax",
  "test-fixture",
@@ -1907,13 +1871,6 @@ version = "1.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b"

-[[package]]
-name = "sourcegen"
-version = "0.0.0"
-dependencies = [
- "xshell",
-]
-
 [[package]]
 name = "span"
 version = "0.0.0"
@@ -1985,6 +1942,7 @@ dependencies = [
  "rayon",
  "rowan",
  "rustc-hash",
+ "rustc_apfloat",
  "smol_str",
  "stdx",
  "test-utils",
@@ -2251,6 +2209,7 @@ dependencies = [
 name = "tt"
 version = "0.0.0"
 dependencies = [
+ "arrayvec",
  "smol_str",
  "stdx",
  "text-size",
19  Cargo.toml

@@ -10,9 +10,7 @@ license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer team"]

 [profile.dev]
-# Disabling debug info speeds up builds a bunch,
-# and we don't rely on it for debugging that much.
-debug = 0
+debug = 1

 [profile.dev.package]
 # These speed up local tests.
@@ -89,10 +87,9 @@ ra-ap-rustc_lexer = { version = "0.53.0", default-features = false }
 ra-ap-rustc_parse_format = { version = "0.53.0", default-features = false }
 ra-ap-rustc_index = { version = "0.53.0", default-features = false }
 ra-ap-rustc_abi = { version = "0.53.0", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.44.0", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.53.0", default-features = false }

 # local crates that aren't published to crates.io. These should not have versions.
-sourcegen = { path = "./crates/sourcegen" }
 test-fixture = { path = "./crates/test-fixture" }
 test-utils = { path = "./crates/test-utils" }

@@ -107,10 +104,10 @@ arrayvec = "0.7.4"
 bitflags = "2.4.1"
 cargo_metadata = "0.18.1"
 camino = "1.1.6"
-chalk-solve = { version = "0.97.0", default-features = false }
+chalk-solve = { version = "0.98.0", default-features = false }
-chalk-ir = "0.97.0"
+chalk-ir = "0.98.0"
-chalk-recursive = { version = "0.97.0", default-features = false }
+chalk-recursive = { version = "0.98.0", default-features = false }
-chalk-derive = "0.97.0"
+chalk-derive = "0.98.0"
 crossbeam-channel = "0.5.8"
 dissimilar = "1.0.7"
 dot = "0.1.4"
@@ -122,6 +119,8 @@ hashbrown = { version = "0.14", features = [
 indexmap = "2.1.0"
 itertools = "0.12.0"
 libc = "0.2.150"
+libloading = "0.8.0"
+memmap2 = "0.5.4"
 nohash-hasher = "0.2.0"
 oorandom = "11.1.3"
 object = { version = "0.33.0", default-features = false, features = [
@@ -145,6 +144,7 @@ smallvec = { version = "1.10.0", features = [
     "const_generics",
 ] }
 smol_str = "0.2.1"
+snap = "1.1.0"
 text-size = "1.1.1"
 tracing = "0.1.40"
 tracing-tree = "0.3.0"
@@ -158,6 +158,7 @@ url = "2.3.1"
 xshell = "0.2.5"


 # We need to freeze the version of the crate, as the raw-api feature is considered unstable
 dashmap = { version = "=5.5.3", features = ["raw-api"] }
@@ -1,7 +1,5 @@
 //! base_db defines basic database traits. The concrete DB is defined by ide.

-#![warn(rust_2018_idioms, unused_lifetimes)]
-
 mod change;
 mod input;

@@ -1,7 +1,5 @@
 //! cfg defines conditional compiling options, `cfg` attribute parser and evaluator

-#![warn(rust_2018_idioms, unused_lifetimes)]
-
 mod cfg_expr;
 mod dnf;
 #[cfg(test)]
@@ -6,8 +6,6 @@
 // addition to `cargo check`. Either split it into 3 crates (one for test, one for check
 // and one common utilities) or change its name and docs to reflect the current state.

-#![warn(rust_2018_idioms, unused_lifetimes)]
-
 use std::{fmt, io, process::Command, time::Duration};

 use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
@@ -428,6 +426,8 @@ impl FlycheckActor {
             }
         }

+        cmd.arg("--keep-going");
+
         options.apply_on_command(&mut cmd);
         (cmd, options.extra_args.clone())
     }
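As an aside, a minimal standalone sketch (not code from this repository; the argument list is illustrative only) of appending a `--keep-going` flag while assembling a `cargo check` invocation with `std::process::Command`, which is the shape of the change above:

```rust
use std::process::Command;

fn main() {
    // Build a `cargo check` command and keep going past the first failing crate,
    // mirroring the flag added in the hunk above.
    let mut cmd = Command::new("cargo");
    cmd.args(["check", "--workspace", "--message-format=json"]);
    cmd.arg("--keep-going");

    // Render the arguments instead of spawning cargo, so the sketch runs anywhere.
    let rendered: Vec<String> =
        cmd.get_args().map(|a| a.to_string_lossy().into_owned()).collect();
    println!("cargo {}", rendered.join(" "));
}
```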
@@ -28,6 +28,7 @@ tracing.workspace = true
 smallvec.workspace = true
 hashbrown.workspace = true
 triomphe.workspace = true
+rustc_apfloat = "0.2.0"

 ra-ap-rustc_parse_format.workspace = true
 ra-ap-rustc_abi.workspace = true
@@ -37,7 +38,6 @@ stdx.workspace = true
 intern.workspace = true
 base-db.workspace = true
 syntax.workspace = true
-profile.workspace = true
 hir-expand.workspace = true
 mbe.workspace = true
 cfg.workspace = true
@@ -15,8 +15,8 @@ use span::AstIdMap;
 use stdx::never;
 use syntax::{
     ast::{
-        self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
-        RangeItem, SlicePatComponents,
+        self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasGenericArgs,
+        HasLoopBody, HasName, RangeItem, SlicePatComponents,
     },
     AstNode, AstPtr, AstToken as _, SyntaxNodePtr,
 };
@@ -30,8 +30,10 @@ pub enum BuiltinUint {

 #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub enum BuiltinFloat {
+    F16,
     F32,
     F64,
+    F128,
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -65,8 +67,10 @@ impl BuiltinType {
         (name![u64], BuiltinType::Uint(BuiltinUint::U64)),
         (name![u128], BuiltinType::Uint(BuiltinUint::U128)),

+        (name![f16], BuiltinType::Float(BuiltinFloat::F16)),
         (name![f32], BuiltinType::Float(BuiltinFloat::F32)),
         (name![f64], BuiltinType::Float(BuiltinFloat::F64)),
+        (name![f128], BuiltinType::Float(BuiltinFloat::F128)),
     ];

     pub fn by_name(name: &Name) -> Option<Self> {
@@ -97,8 +101,10 @@ impl AsName for BuiltinType {
                 BuiltinUint::U128 => name![u128],
             },
             BuiltinType::Float(it) => match it {
+                BuiltinFloat::F16 => name![f16],
                 BuiltinFloat::F32 => name![f32],
                 BuiltinFloat::F64 => name![f64],
+                BuiltinFloat::F128 => name![f128],
             },
         }
     }
@@ -155,8 +161,10 @@ impl BuiltinUint {
 impl BuiltinFloat {
     pub fn from_suffix(suffix: &str) -> Option<BuiltinFloat> {
         let res = match suffix {
+            "f16" => BuiltinFloat::F16,
             "f32" => BuiltinFloat::F32,
             "f64" => BuiltinFloat::F64,
+            "f128" => BuiltinFloat::F128,
             _ => return None,
         };
         Some(res)
@@ -192,8 +200,10 @@ impl fmt::Display for BuiltinUint {
 impl fmt::Display for BuiltinFloat {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.write_str(match self {
+            BuiltinFloat::F16 => "f16",
             BuiltinFloat::F32 => "f32",
             BuiltinFloat::F64 => "f64",
+            BuiltinFloat::F128 => "f128",
         })
     }
 }
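For illustration, a self-contained sketch of the suffix-to-width mapping the hunks above extend to `f16` and `f128`. The `FloatWidth` enum and `from_suffix` function here are stand-ins that mirror the shape of the diff, not the real rust-analyzer types:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum FloatWidth {
    F16,
    F32,
    F64,
    F128,
}

// Map a numeric literal suffix to a float width, returning None for non-float suffixes.
fn from_suffix(suffix: &str) -> Option<FloatWidth> {
    Some(match suffix {
        "f16" => FloatWidth::F16,
        "f32" => FloatWidth::F32,
        "f64" => FloatWidth::F64,
        "f128" => FloatWidth::F128,
        _ => return None,
    })
}

fn main() {
    assert_eq!(from_suffix("f16"), Some(FloatWidth::F16));
    assert_eq!(from_suffix("f128"), Some(FloatWidth::F128));
    assert_eq!(from_suffix("u32"), None);
    println!("suffix mapping ok");
}
```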
@@ -214,8 +214,8 @@ impl ChildBySource for GenericDefId {
         }

         let generic_params = db.generic_params(*self);
-        let mut toc_idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx);
-        let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx);
+        let mut toc_idx_iter = generic_params.iter_type_or_consts().map(|(idx, _)| idx);
+        let lts_idx_iter = generic_params.iter_lt().map(|(idx, _)| idx);

         // For traits the first type index is `Self`, skip it.
         if let GenericDefId::TraitId(_) = *self {
@@ -323,7 +323,7 @@ impl TraitAliasData {
 pub struct ImplData {
     pub target_trait: Option<Interned<TraitRef>>,
     pub self_ty: Interned<TypeRef>,
-    pub items: Vec<AssocItemId>,
+    pub items: Box<[AssocItemId]>,
     pub is_negative: bool,
     pub is_unsafe: bool,
     // box it as the vec is usually empty anyways
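The `Vec<AssocItemId>` to `Box<[AssocItemId]>` switch above trades push-ability for a smaller field: a boxed slice is a pointer plus a length, while a `Vec` additionally carries a capacity. A runnable size comparison, with plain `u32` elements standing in for the real ID type:

```rust
use std::mem::size_of;

fn main() {
    // On a 64-bit target: Vec<T> = ptr + len + cap = 24 bytes, Box<[T]> = ptr + len = 16 bytes.
    println!("Vec<u32>:   {} bytes", size_of::<Vec<u32>>());
    println!("Box<[u32]>: {} bytes", size_of::<Box<[u32]>>());

    // Converting is a one-liner once the list is final.
    let items: Vec<u32> = vec![1, 2, 3];
    let frozen: Box<[u32]> = items.into_boxed_slice();
    assert_eq!(frozen.len(), 3);
}
```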
@@ -637,10 +637,6 @@ impl<'a> AssocItemCollector<'a> {
                     attr,
                 ) {
                     Ok(ResolvedAttr::Macro(call_id)) => {
-                        // If proc attribute macro expansion is disabled, skip expanding it here
-                        if !self.db.expand_proc_attr_macros() {
-                            continue 'attrs;
-                        }
                         let loc = self.db.lookup_intern_macro_call(call_id);
                         if let MacroDefKind::ProcMacro(_, exp, _) = loc.def.kind {
                             // If there's no expander for the proc macro (e.g. the
@@ -80,9 +80,11 @@ pub trait InternDatabase: SourceDatabase {

 #[salsa::query_group(DefDatabaseStorage)]
 pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDatabase> {
+    /// Whether to expand procedural macros during name resolution.
     #[salsa::input]
     fn expand_proc_attr_macros(&self) -> bool;

+    /// Computes an [`ItemTree`] for the given file or macro expansion.
     #[salsa::invoke(ItemTree::file_item_tree_query)]
     fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;

@@ -96,6 +98,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
     #[salsa::invoke(DefMap::block_def_map_query)]
     fn block_def_map(&self, block: BlockId) -> Arc<DefMap>;

+    /// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution.
     fn macro_def(&self, m: MacroId) -> MacroDefId;

     // region:data
@@ -190,6 +193,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
     #[salsa::invoke(Attrs::fields_attrs_query)]
     fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;

+    // should this really be a query?
     #[salsa::invoke(crate::attr::fields_attrs_source_map)]
     fn fields_attrs_source_map(
         &self,
@@ -183,6 +183,8 @@ fn find_path_for_module(
     let kind = if name_already_occupied_in_type_ns {
         cov_mark::hit!(ambiguous_crate_start);
         PathKind::Abs
+    } else if ctx.cfg.prefer_absolute {
+        PathKind::Abs
     } else {
         PathKind::Plain
     };
@@ -564,7 +566,13 @@ mod tests {
     /// item the `path` refers to returns that same path when called from the
     /// module the cursor is in.
     #[track_caller]
-    fn check_found_path_(ra_fixture: &str, path: &str, prefer_prelude: bool, expect: Expect) {
+    fn check_found_path_(
+        ra_fixture: &str,
+        path: &str,
+        prefer_prelude: bool,
+        prefer_absolute: bool,
+        expect: Expect,
+    ) {
         let (db, pos) = TestDB::with_position(ra_fixture);
         let module = db.module_at_position(pos);
         let parsed_path_file =
@@ -604,7 +612,7 @@ mod tests {
             module,
             prefix,
             ignore_local_imports,
-            ImportPathConfig { prefer_no_std: false, prefer_prelude },
+            ImportPathConfig { prefer_no_std: false, prefer_prelude, prefer_absolute },
         );
         format_to!(
             res,
@@ -619,11 +627,15 @@ mod tests {
     }

     fn check_found_path(ra_fixture: &str, path: &str, expect: Expect) {
-        check_found_path_(ra_fixture, path, false, expect);
+        check_found_path_(ra_fixture, path, false, false, expect);
     }

     fn check_found_path_prelude(ra_fixture: &str, path: &str, expect: Expect) {
-        check_found_path_(ra_fixture, path, true, expect);
+        check_found_path_(ra_fixture, path, true, false, expect);
+    }
+
+    fn check_found_path_absolute(ra_fixture: &str, path: &str, expect: Expect) {
+        check_found_path_(ra_fixture, path, false, true, expect);
     }

     #[test]
@@ -870,6 +882,39 @@ pub mod ast {
         );
     }

+    #[test]
+    fn partially_imported_with_prefer_absolute() {
+        cov_mark::check!(partially_imported);
+        // Similar to partially_imported test case above, but with prefer_absolute enabled.
+        // Even if the actual imported item is in external crate, if the path to that item
+        // is starting from the imported name, then the path should not start from "::".
+        // i.e. The first line in the expected output should not start from "::".
+        check_found_path_absolute(
+            r#"
+//- /main.rs crate:main deps:syntax
+
+use syntax::ast;
+$0
+
+//- /lib.rs crate:syntax
+pub mod ast {
+    pub enum ModuleItem {
+        A, B, C,
+    }
+}
+"#,
+            "syntax::ast::ModuleItem",
+            expect![[r#"
+                Plain (imports ✔): ast::ModuleItem
+                Plain (imports ✖): ::syntax::ast::ModuleItem
+                ByCrate(imports ✔): crate::ast::ModuleItem
+                ByCrate(imports ✖): ::syntax::ast::ModuleItem
+                BySelf (imports ✔): self::ast::ModuleItem
+                BySelf (imports ✖): ::syntax::ast::ModuleItem
+            "#]],
+        );
+    }
+
     #[test]
     fn same_crate_reexport() {
         check_found_path(
@@ -1769,6 +1814,43 @@ pub mod foo {
         );
     }

+    #[test]
+    fn respects_absolute_setting() {
+        let ra_fixture = r#"
+//- /main.rs crate:main deps:krate
+$0
+//- /krate.rs crate:krate
+pub mod foo {
+    pub struct Foo;
+}
+"#;
+        check_found_path(
+            ra_fixture,
+            "krate::foo::Foo",
+            expect![[r#"
+                Plain (imports ✔): krate::foo::Foo
+                Plain (imports ✖): krate::foo::Foo
+                ByCrate(imports ✔): krate::foo::Foo
+                ByCrate(imports ✖): krate::foo::Foo
+                BySelf (imports ✔): krate::foo::Foo
+                BySelf (imports ✖): krate::foo::Foo
+            "#]],
+        );
+
+        check_found_path_absolute(
+            ra_fixture,
+            "krate::foo::Foo",
+            expect![[r#"
+                Plain (imports ✔): ::krate::foo::Foo
+                Plain (imports ✖): ::krate::foo::Foo
+                ByCrate(imports ✔): ::krate::foo::Foo
+                ByCrate(imports ✖): ::krate::foo::Foo
+                BySelf (imports ✔): ::krate::foo::Foo
+                BySelf (imports ✖): ::krate::foo::Foo
+            "#]],
+        );
+    }
+
     #[test]
     fn respect_segment_length() {
         check_found_path(
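For context, a standalone sketch of the decision the first hunk above adds. The `ImportPathCfg` struct and `PathKind` enum here are simplified stand-ins for rust-analyzer's internal types: an already-occupied name in the type namespace, or an enabled `prefer_absolute` setting, both force an absolute (`::`-prefixed) path.

```rust
#[derive(Debug, PartialEq, Eq)]
enum PathKind {
    Plain,
    Abs,
}

// Simplified stand-in for the import path configuration consulted by find_path.
struct ImportPathCfg {
    prefer_absolute: bool,
}

fn choose_kind(name_already_occupied_in_type_ns: bool, cfg: &ImportPathCfg) -> PathKind {
    if name_already_occupied_in_type_ns {
        PathKind::Abs
    } else if cfg.prefer_absolute {
        PathKind::Abs
    } else {
        PathKind::Plain
    }
}

fn main() {
    let absolute = ImportPathCfg { prefer_absolute: true };
    assert_eq!(choose_kind(false, &absolute), PathKind::Abs);

    let plain = ImportPathCfg { prefer_absolute: false };
    assert_eq!(choose_kind(false, &plain), PathKind::Plain);
    println!("path kind selection ok");
}
```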
@@ -28,6 +28,7 @@ use crate::{
     LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
 };

+/// The index of the self param in the generic of the non-parent definition.
 const SELF_PARAM_ID_IN_SELF: la_arena::Idx<TypeOrConstParamData> =
     LocalTypeOrConstParamId::from_raw(RawIdx::from_u32(0));

@@ -158,9 +159,9 @@ pub enum GenericParamDataRef<'a> {
 /// Data about the generic parameters of a function, struct, impl, etc.
 #[derive(Clone, PartialEq, Eq, Debug, Hash)]
 pub struct GenericParams {
-    pub type_or_consts: Arena<TypeOrConstParamData>,
-    pub lifetimes: Arena<LifetimeParamData>,
-    pub where_predicates: Box<[WherePredicate]>,
+    type_or_consts: Arena<TypeOrConstParamData>,
+    lifetimes: Arena<LifetimeParamData>,
+    where_predicates: Box<[WherePredicate]>,
 }

 impl ops::Index<LocalTypeOrConstParamId> for GenericParams {
@@ -205,6 +206,219 @@ pub enum WherePredicateTypeTarget {
     TypeOrConstParam(LocalTypeOrConstParamId),
 }

+impl GenericParams {
+    /// Number of Generic parameters (type_or_consts + lifetimes)
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.type_or_consts.len() + self.lifetimes.len()
+    }
+
+    #[inline]
+    pub fn len_lifetimes(&self) -> usize {
+        self.lifetimes.len()
+    }
+
+    #[inline]
+    pub fn len_type_or_consts(&self) -> usize {
+        self.type_or_consts.len()
+    }
+
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    #[inline]
+    pub fn where_predicates(&self) -> std::slice::Iter<'_, WherePredicate> {
+        self.where_predicates.iter()
+    }
+
+    /// Iterator of type_or_consts field
+    #[inline]
+    pub fn iter_type_or_consts(
+        &self,
+    ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
+        self.type_or_consts.iter()
+    }
+
+    /// Iterator of lifetimes field
+    #[inline]
+    pub fn iter_lt(
+        &self,
+    ) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> {
+        self.lifetimes.iter()
+    }
+
+    pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option<TypeParamId> {
+        self.type_or_consts.iter().find_map(|(id, p)| {
+            if p.name().as_ref() == Some(&name) && p.type_param().is_some() {
+                Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
+            } else {
+                None
+            }
+        })
+    }
+
+    pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option<ConstParamId> {
+        self.type_or_consts.iter().find_map(|(id, p)| {
+            if p.name().as_ref() == Some(&name) && p.const_param().is_some() {
+                Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
+            } else {
+                None
+            }
+        })
+    }
+
+    #[inline]
+    pub fn trait_self_param(&self) -> Option<LocalTypeOrConstParamId> {
+        if self.type_or_consts.is_empty() {
+            return None;
+        }
+        matches!(
+            self.type_or_consts[SELF_PARAM_ID_IN_SELF],
+            TypeOrConstParamData::TypeParamData(TypeParamData {
+                provenance: TypeParamProvenance::TraitSelf,
+                ..
+            })
+        )
+        .then(|| SELF_PARAM_ID_IN_SELF)
+    }
+
+    pub fn find_lifetime_by_name(
+        &self,
+        name: &Name,
+        parent: GenericDefId,
+    ) -> Option<LifetimeParamId> {
+        self.lifetimes.iter().find_map(|(id, p)| {
+            if &p.name == name {
+                Some(LifetimeParamId { local_id: id, parent })
+            } else {
+                None
+            }
+        })
+    }
+
+    pub(crate) fn generic_params_query(
+        db: &dyn DefDatabase,
+        def: GenericDefId,
+    ) -> Interned<GenericParams> {
+        let _p = tracing::info_span!("generic_params_query").entered();
+
+        let krate = def.krate(db);
+        let cfg_options = db.crate_graph();
+        let cfg_options = &cfg_options[krate].cfg_options;
+
+        // Returns the generic parameters that are enabled under the current `#[cfg]` options
+        let enabled_params =
+            |params: &Interned<GenericParams>, item_tree: &ItemTree, parent: GenericModItem| {
+                let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options);
+                let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param);
+                let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param);
+
+                // In the common case, no parameters will by disabled by `#[cfg]` attributes.
+                // Therefore, make a first pass to check if all parameters are enabled and, if so,
+                // clone the `Interned<GenericParams>` instead of recreating an identical copy.
+                let all_type_or_consts_enabled =
+                    params.type_or_consts.iter().all(|(idx, _)| enabled(attr_owner_ct(idx)));
+                let all_lifetimes_enabled =
+                    params.lifetimes.iter().all(|(idx, _)| enabled(attr_owner_lt(idx)));
+
+                if all_type_or_consts_enabled && all_lifetimes_enabled {
+                    params.clone()
+                } else {
+                    Interned::new(GenericParams {
+                        type_or_consts: all_type_or_consts_enabled
+                            .then(|| params.type_or_consts.clone())
+                            .unwrap_or_else(|| {
+                                params
+                                    .type_or_consts
+                                    .iter()
+                                    .filter(|&(idx, _)| enabled(attr_owner_ct(idx)))
+                                    .map(|(_, param)| param.clone())
+                                    .collect()
+                            }),
+                        lifetimes: all_lifetimes_enabled
+                            .then(|| params.lifetimes.clone())
+                            .unwrap_or_else(|| {
+                                params
+                                    .lifetimes
+                                    .iter()
+                                    .filter(|&(idx, _)| enabled(attr_owner_lt(idx)))
+                                    .map(|(_, param)| param.clone())
+                                    .collect()
+                            }),
+                        where_predicates: params.where_predicates.clone(),
+                    })
+                }
+            };
+        fn id_to_generics<Id: GenericsItemTreeNode>(
+            db: &dyn DefDatabase,
+            id: impl for<'db> Lookup<
+                Database<'db> = dyn DefDatabase + 'db,
+                Data = impl ItemTreeLoc<Id = Id>,
+            >,
+            enabled_params: impl Fn(
+                &Interned<GenericParams>,
+                &ItemTree,
+                GenericModItem,
+            ) -> Interned<GenericParams>,
+        ) -> Interned<GenericParams>
+        where
+            FileItemTreeId<Id>: Into<GenericModItem>,
+        {
+            let id = id.lookup(db).item_tree_id();
+            let tree = id.item_tree(db);
+            let item = &tree[id.value];
+            enabled_params(item.generic_params(), &tree, id.value.into())
+        }
+
+        match def {
+            GenericDefId::FunctionId(id) => {
+                let loc = id.lookup(db);
+                let tree = loc.id.item_tree(db);
+                let item = &tree[loc.id.value];
+
+                let enabled_params =
+                    enabled_params(&item.explicit_generic_params, &tree, loc.id.value.into());
+
+                let module = loc.container.module(db);
+                let func_data = db.function_data(id);
+                if func_data.params.is_empty() {
+                    enabled_params
+                } else {
+                    let mut generic_params = GenericParamsCollector {
+                        type_or_consts: enabled_params.type_or_consts.clone(),
+                        lifetimes: enabled_params.lifetimes.clone(),
+                        where_predicates: enabled_params.where_predicates.clone().into(),
+                    };
+
+                    // Don't create an `Expander` if not needed since this
+                    // could cause a reparse after the `ItemTree` has been created due to the spanmap.
+                    let mut expander = Lazy::new(|| {
+                        (module.def_map(db), Expander::new(db, loc.id.file_id(), module))
+                    });
+                    for param in func_data.params.iter() {
+                        generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
+                    }
+                    Interned::new(generic_params.finish())
+                }
+            }
+            GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params),
+            GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params),
+            GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params),
+            GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params),
+            GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params),
+            GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params),
+            GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params),
+            GenericDefId::ConstId(_) => Interned::new(GenericParams {
+                type_or_consts: Default::default(),
+                lifetimes: Default::default(),
+                where_predicates: Default::default(),
+            }),
+        }
+    }
+}
+
 #[derive(Clone, Default)]
 pub(crate) struct GenericParamsCollector {
     pub(crate) type_or_consts: Arena<TypeOrConstParamData>,
@@ -441,202 +655,3 @@ impl GenericParamsCollector {
         }
     }
 }
-
-impl GenericParams {
-    /// Number of Generic parameters (type_or_consts + lifetimes)
-    #[inline]
-    pub fn len(&self) -> usize {
-        self.type_or_consts.len() + self.lifetimes.len()
-    }
-
-    #[inline]
-    pub fn is_empty(&self) -> bool {
-        self.len() == 0
-    }
-
-    /// Iterator of type_or_consts field
-    #[inline]
-    pub fn iter_type_or_consts(
-        &self,
-    ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
-        self.type_or_consts.iter()
-    }
-
-    /// Iterator of lifetimes field
-    #[inline]
-    pub fn iter_lt(
-        &self,
-    ) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> {
-        self.lifetimes.iter()
-    }
-
-    pub(crate) fn generic_params_query(
-        db: &dyn DefDatabase,
-        def: GenericDefId,
-    ) -> Interned<GenericParams> {
-        let _p = tracing::info_span!("generic_params_query").entered();
-
-        let krate = def.module(db).krate;
-        let cfg_options = db.crate_graph();
-        let cfg_options = &cfg_options[krate].cfg_options;
-
-        // Returns the generic parameters that are enabled under the current `#[cfg]` options
-        let enabled_params =
-            |params: &Interned<GenericParams>, item_tree: &ItemTree, parent: GenericModItem| {
-                let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options);
-                let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param);
-                let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param);
-
-                // In the common case, no parameters will by disabled by `#[cfg]` attributes.
-                // Therefore, make a first pass to check if all parameters are enabled and, if so,
-                // clone the `Interned<GenericParams>` instead of recreating an identical copy.
-                let all_type_or_consts_enabled =
-                    params.type_or_consts.iter().all(|(idx, _)| enabled(attr_owner_ct(idx)));
-                let all_lifetimes_enabled =
-                    params.lifetimes.iter().all(|(idx, _)| enabled(attr_owner_lt(idx)));
-
-                if all_type_or_consts_enabled && all_lifetimes_enabled {
-                    params.clone()
-                } else {
-                    Interned::new(GenericParams {
-                        type_or_consts: all_type_or_consts_enabled
-                            .then(|| params.type_or_consts.clone())
-                            .unwrap_or_else(|| {
-                                params
-                                    .type_or_consts
-                                    .iter()
-                                    .filter(|&(idx, _)| enabled(attr_owner_ct(idx)))
-                                    .map(|(_, param)| param.clone())
-                                    .collect()
-                            }),
-                        lifetimes: all_lifetimes_enabled
-                            .then(|| params.lifetimes.clone())
-                            .unwrap_or_else(|| {
-                                params
-                                    .lifetimes
-                                    .iter()
-                                    .filter(|&(idx, _)| enabled(attr_owner_lt(idx)))
-                                    .map(|(_, param)| param.clone())
-                                    .collect()
-                            }),
-                        where_predicates: params.where_predicates.clone(),
-                    })
-                }
-            };
-        fn id_to_generics<Id: GenericsItemTreeNode>(
-            db: &dyn DefDatabase,
-            id: impl for<'db> Lookup<
-                Database<'db> = dyn DefDatabase + 'db,
-                Data = impl ItemTreeLoc<Id = Id>,
-            >,
-            enabled_params: impl Fn(
-                &Interned<GenericParams>,
-                &ItemTree,
-                GenericModItem,
-            ) -> Interned<GenericParams>,
-        ) -> Interned<GenericParams>
-        where
-            FileItemTreeId<Id>: Into<GenericModItem>,
-        {
-            let id = id.lookup(db).item_tree_id();
-            let tree = id.item_tree(db);
-            let item = &tree[id.value];
-            enabled_params(item.generic_params(), &tree, id.value.into())
-        }
-
-        match def {
-            GenericDefId::FunctionId(id) => {
-                let loc = id.lookup(db);
-                let tree = loc.id.item_tree(db);
-                let item = &tree[loc.id.value];
-
-                let enabled_params =
-                    enabled_params(&item.explicit_generic_params, &tree, loc.id.value.into());
-
-                let module = loc.container.module(db);
-                let func_data = db.function_data(id);
-                if func_data.params.is_empty() {
-                    enabled_params
-                } else {
-                    let mut generic_params = GenericParamsCollector {
-                        type_or_consts: enabled_params.type_or_consts.clone(),
-                        lifetimes: enabled_params.lifetimes.clone(),
-                        where_predicates: enabled_params.where_predicates.clone().into(),
-                    };
-
-                    // Don't create an `Expander` if not needed since this
-                    // could cause a reparse after the `ItemTree` has been created due to the spanmap.
-                    let mut expander = Lazy::new(|| {
-                        (module.def_map(db), Expander::new(db, loc.id.file_id(), module))
-                    });
-                    for param in func_data.params.iter() {
-                        generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
-                    }
-                    Interned::new(generic_params.finish())
-                }
-            }
-            GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params),
-            GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params),
-            GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params),
-            GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params),
-            GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params),
-            GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params),
-            GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params),
-            GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => {
-                Interned::new(GenericParams {
-                    type_or_consts: Default::default(),
-                    lifetimes: Default::default(),
-                    where_predicates: Default::default(),
-                })
-            }
-        }
-    }
-
-    pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option<TypeParamId> {
-        self.type_or_consts.iter().find_map(|(id, p)| {
-            if p.name().as_ref() == Some(&name) && p.type_param().is_some() {
-                Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
-            } else {
-                None
-            }
-        })
-    }
-
-    pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option<ConstParamId> {
-        self.type_or_consts.iter().find_map(|(id, p)| {
-            if p.name().as_ref() == Some(&name) && p.const_param().is_some() {
-                Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
-            } else {
-                None
-            }
-        })
-    }
-
-    pub fn trait_self_param(&self) -> Option<LocalTypeOrConstParamId> {
-        if self.type_or_consts.is_empty() {
-            return None;
-        }
-        matches!(
-            self.type_or_consts[SELF_PARAM_ID_IN_SELF],
-            TypeOrConstParamData::TypeParamData(TypeParamData {
-                provenance: TypeParamProvenance::TraitSelf,
-                ..
-            })
-        )
-        .then(|| SELF_PARAM_ID_IN_SELF)
-    }
-
-    pub fn find_lifetime_by_name(
-        &self,
-        name: &Name,
-        parent: GenericDefId,
-    ) -> Option<LifetimeParamId> {
-        self.lifetimes.iter().find_map(|(id, p)| {
-            if &p.name == name {
-                Some(LifetimeParamId { local_id: id, parent })
-            } else {
-                None
-            }
-        })
-    }
-}
@@ -20,6 +20,7 @@ use std::fmt;
 use hir_expand::name::Name;
 use intern::Interned;
 use la_arena::{Idx, RawIdx};
+use rustc_apfloat::ieee::{Half as f16, Quad as f128};
 use smallvec::SmallVec;
 use syntax::ast;

@@ -56,29 +57,38 @@ pub struct Label {
 }
 pub type LabelId = Idx<Label>;

-// We convert float values into bits and that's how we don't need to deal with f32 and f64.
-// For PartialEq, bits comparison should work, as ordering is not important
+// We leave float values as a string to avoid double rounding.
+// For PartialEq, string comparison should work, as ordering is not important
 // https://github.com/rust-lang/rust-analyzer/issues/12380#issuecomment-1137284360
-#[derive(Default, Debug, Clone, Copy, Eq, PartialEq)]
-pub struct FloatTypeWrapper(u64);
+#[derive(Default, Debug, Clone, Eq, PartialEq)]
+pub struct FloatTypeWrapper(Box<str>);

+// FIXME(#17451): Use builtin types once stabilised.
 impl FloatTypeWrapper {
-    pub fn new(value: f64) -> Self {
-        Self(value.to_bits())
+    pub fn new(value: String) -> Self {
+        Self(value.into())
     }

-    pub fn into_f64(self) -> f64 {
-        f64::from_bits(self.0)
+    pub fn to_f128(&self) -> f128 {
+        self.0.parse().unwrap_or_default()
     }

-    pub fn into_f32(self) -> f32 {
-        f64::from_bits(self.0) as f32
+    pub fn to_f64(&self) -> f64 {
+        self.0.parse().unwrap_or_default()
+    }
+
+    pub fn to_f32(&self) -> f32 {
+        self.0.parse().unwrap_or_default()
+    }
+
+    pub fn to_f16(&self) -> f16 {
+        self.0.parse().unwrap_or_default()
     }
 }

 impl fmt::Display for FloatTypeWrapper {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{:?}", f64::from_bits(self.0))
+        f.write_str(&self.0)
     }
 }

@@ -91,7 +101,7 @@ pub enum Literal {
     Bool(bool),
     Int(i128, Option<BuiltinInt>),
     Uint(u128, Option<BuiltinUint>),
-    // Here we are using a wrapper around float because f32 and f64 do not implement Eq, so they
+    // Here we are using a wrapper around float because float primitives do not implement Eq, so they
     // could not be used directly here, to understand how the wrapper works go to definition of
     // FloatTypeWrapper
     Float(FloatTypeWrapper, Option<BuiltinFloat>),
@@ -120,10 +130,7 @@ impl From<ast::LiteralKind> for Literal {
         match ast_lit_kind {
             LiteralKind::IntNumber(lit) => {
                 if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
-                    Literal::Float(
-                        FloatTypeWrapper::new(lit.float_value().unwrap_or(Default::default())),
-                        builtin,
-                    )
+                    Literal::Float(FloatTypeWrapper::new(lit.value_string()), builtin)
                 } else if let builtin @ Some(_) = lit.suffix().and_then(BuiltinUint::from_suffix) {
                     Literal::Uint(lit.value().unwrap_or(0), builtin)
                 } else {
@@ -133,7 +140,7 @@ impl From<ast::LiteralKind> for Literal {
             }
             LiteralKind::FloatNumber(lit) => {
                 let ty = lit.suffix().and_then(BuiltinFloat::from_suffix);
-                Literal::Float(FloatTypeWrapper::new(lit.value().unwrap_or(Default::default())), ty)
+                Literal::Float(FloatTypeWrapper::new(lit.value_string()), ty)
             }
             LiteralKind::ByteString(bs) => {
                 let text = bs.value().map_or_else(|_| Default::default(), Box::from);
@ -10,7 +10,7 @@ use hir_expand::{
|
||||||
AstId,
|
AstId,
|
||||||
};
|
};
|
||||||
use intern::Interned;
|
use intern::Interned;
|
||||||
use syntax::ast::{self, HasName, IsString};
|
use syntax::ast::{self, HasGenericArgs, HasName, IsString};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
|
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
|
||||||
|
@ -245,7 +245,13 @@ impl TypeRef {
|
||||||
// for types are close enough for our purposes to the inner type for now...
|
// for types are close enough for our purposes to the inner type for now...
|
||||||
ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
|
ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
|
||||||
ast::Type::ImplTraitType(inner) => {
|
ast::Type::ImplTraitType(inner) => {
|
||||||
TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
|
if ctx.outer_impl_trait() {
|
||||||
|
// Disallow nested impl traits
|
||||||
|
TypeRef::Error
|
||||||
|
} else {
|
||||||
|
let _guard = ctx.outer_impl_trait_scope(true);
|
||||||
|
TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
ast::Type::DynTraitType(inner) => {
|
ast::Type::DynTraitType(inner) => {
|
||||||
TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
|
TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
|
||||||
|
|
|
@ -8,7 +8,6 @@ use hir_expand::{attrs::AttrId, db::ExpandDatabase, name::Name, AstId, MacroCall
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use la_arena::Idx;
|
use la_arena::Idx;
|
||||||
use once_cell::sync::Lazy;
|
use once_cell::sync::Lazy;
|
||||||
use profile::Count;
|
|
||||||
use rustc_hash::{FxHashMap, FxHashSet};
|
use rustc_hash::{FxHashMap, FxHashSet};
|
||||||
use smallvec::{smallvec, SmallVec};
|
use smallvec::{smallvec, SmallVec};
|
||||||
use stdx::format_to;
|
use stdx::format_to;
|
||||||
|
@ -65,8 +64,6 @@ pub struct ImportId {
|
||||||
|
|
||||||
#[derive(Debug, Default, PartialEq, Eq)]
|
#[derive(Debug, Default, PartialEq, Eq)]
|
||||||
pub struct ItemScope {
|
pub struct ItemScope {
|
||||||
_c: Count<Self>,
|
|
||||||
|
|
||||||
/// Defs visible in this scope. This includes `declarations`, but also
|
/// Defs visible in this scope. This includes `declarations`, but also
|
||||||
/// imports. The imports belong to this module and can be resolved by using them on
|
/// imports. The imports belong to this module and can be resolved by using them on
|
||||||
/// the `use_imports_*` fields.
|
/// the `use_imports_*` fields.
|
||||||
|
@ -722,7 +719,6 @@ impl ItemScope {
|
||||||
pub(crate) fn shrink_to_fit(&mut self) {
|
pub(crate) fn shrink_to_fit(&mut self) {
|
||||||
// Exhaustive match to require handling new fields.
|
// Exhaustive match to require handling new fields.
|
||||||
let Self {
|
let Self {
|
||||||
_c: _,
|
|
||||||
types,
|
types,
|
||||||
values,
|
values,
|
||||||
macros,
|
macros,
|
||||||
|
|
|
@ -48,6 +48,7 @@ use either::Either;
|
||||||
use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile};
|
use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile};
|
||||||
use intern::Interned;
|
use intern::Interned;
|
||||||
use la_arena::{Arena, Idx, IdxRange, RawIdx};
|
use la_arena::{Arena, Idx, IdxRange, RawIdx};
|
||||||
|
use once_cell::sync::OnceCell;
|
||||||
use rustc_hash::FxHashMap;
|
use rustc_hash::FxHashMap;
|
||||||
use smallvec::SmallVec;
|
use smallvec::SmallVec;
|
||||||
use span::{AstIdNode, FileAstId, SyntaxContextId};
|
use span::{AstIdNode, FileAstId, SyntaxContextId};
|
||||||
|
@ -100,6 +101,7 @@ pub struct ItemTree {
|
||||||
impl ItemTree {
|
impl ItemTree {
|
||||||
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
|
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
|
||||||
let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
|
let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
|
||||||
|
static EMPTY: OnceCell<Arc<ItemTree>> = OnceCell::new();
|
||||||
|
|
||||||
let syntax = db.parse_or_expand(file_id);
|
let syntax = db.parse_or_expand(file_id);
|
||||||
|
|
||||||
|
@ -131,18 +133,47 @@ impl ItemTree {
|
||||||
if let Some(attrs) = top_attrs {
|
if let Some(attrs) = top_attrs {
|
||||||
item_tree.attrs.insert(AttrOwner::TopLevel, attrs);
|
item_tree.attrs.insert(AttrOwner::TopLevel, attrs);
|
||||||
}
|
}
|
||||||
item_tree.shrink_to_fit();
|
if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty()
|
||||||
Arc::new(item_tree)
|
{
|
||||||
|
EMPTY
|
||||||
|
.get_or_init(|| {
|
||||||
|
Arc::new(ItemTree {
|
||||||
|
top_level: SmallVec::new_const(),
|
||||||
|
attrs: FxHashMap::default(),
|
||||||
|
data: None,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.clone()
|
||||||
|
} else {
|
||||||
|
item_tree.shrink_to_fit();
|
||||||
|
Arc::new(item_tree)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
|
pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
|
||||||
|
let _p = tracing::info_span!("block_item_tree_query", ?block).entered();
|
||||||
|
static EMPTY: OnceCell<Arc<ItemTree>> = OnceCell::new();
|
||||||
|
|
||||||
let loc = block.lookup(db);
|
let loc = block.lookup(db);
|
||||||
let block = loc.ast_id.to_node(db.upcast());
|
let block = loc.ast_id.to_node(db.upcast());
|
||||||
|
|
||||||
let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
|
let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
|
||||||
let mut item_tree = ctx.lower_block(&block);
|
let mut item_tree = ctx.lower_block(&block);
|
||||||
item_tree.shrink_to_fit();
|
if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty()
|
||||||
Arc::new(item_tree)
|
{
|
||||||
|
EMPTY
|
||||||
|
.get_or_init(|| {
|
||||||
|
Arc::new(ItemTree {
|
||||||
|
top_level: SmallVec::new_const(),
|
||||||
|
attrs: FxHashMap::default(),
|
||||||
|
data: None,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.clone()
|
||||||
|
} else {
|
||||||
|
item_tree.shrink_to_fit();
|
||||||
|
Arc::new(item_tree)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns an iterator over all items located at the top level of the `HirFileId` this
|
/// Returns an iterator over all items located at the top level of the `HirFileId` this
|
||||||
|
@ -585,24 +616,30 @@ impl Index<RawVisibilityId> for ItemTree {
|
||||||
type Output = RawVisibility;
|
type Output = RawVisibility;
|
||||||
fn index(&self, index: RawVisibilityId) -> &Self::Output {
|
fn index(&self, index: RawVisibilityId) -> &Self::Output {
|
||||||
static VIS_PUB: RawVisibility = RawVisibility::Public;
|
static VIS_PUB: RawVisibility = RawVisibility::Public;
|
||||||
static VIS_PRIV_IMPLICIT: RawVisibility = RawVisibility::Module(
|
static VIS_PRIV_IMPLICIT: OnceCell<RawVisibility> = OnceCell::new();
|
||||||
ModPath::from_kind(PathKind::SELF),
|
static VIS_PRIV_EXPLICIT: OnceCell<RawVisibility> = OnceCell::new();
|
||||||
VisibilityExplicitness::Implicit,
|
static VIS_PUB_CRATE: OnceCell<RawVisibility> = OnceCell::new();
|
||||||
);
|
|
||||||
static VIS_PRIV_EXPLICIT: RawVisibility = RawVisibility::Module(
|
|
||||||
ModPath::from_kind(PathKind::SELF),
|
|
||||||
VisibilityExplicitness::Explicit,
|
|
||||||
);
|
|
||||||
static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(
|
|
||||||
ModPath::from_kind(PathKind::Crate),
|
|
||||||
VisibilityExplicitness::Explicit,
|
|
||||||
);
|
|
||||||
|
|
||||||
match index {
|
match index {
|
||||||
RawVisibilityId::PRIV_IMPLICIT => &VIS_PRIV_IMPLICIT,
|
RawVisibilityId::PRIV_IMPLICIT => VIS_PRIV_IMPLICIT.get_or_init(|| {
|
||||||
RawVisibilityId::PRIV_EXPLICIT => &VIS_PRIV_EXPLICIT,
|
RawVisibility::Module(
|
||||||
|
Interned::new(ModPath::from_kind(PathKind::SELF)),
|
||||||
|
VisibilityExplicitness::Implicit,
|
||||||
|
)
|
||||||
|
}),
|
||||||
|
RawVisibilityId::PRIV_EXPLICIT => VIS_PRIV_EXPLICIT.get_or_init(|| {
|
||||||
|
RawVisibility::Module(
|
||||||
|
Interned::new(ModPath::from_kind(PathKind::SELF)),
|
||||||
|
VisibilityExplicitness::Explicit,
|
||||||
|
)
|
||||||
|
}),
|
||||||
RawVisibilityId::PUB => &VIS_PUB,
|
RawVisibilityId::PUB => &VIS_PUB,
|
||||||
RawVisibilityId::PUB_CRATE => &VIS_PUB_CRATE,
|
RawVisibilityId::PUB_CRATE => VIS_PUB_CRATE.get_or_init(|| {
|
||||||
|
RawVisibility::Module(
|
||||||
|
Interned::new(ModPath::from_kind(PathKind::Crate)),
|
||||||
|
VisibilityExplicitness::Explicit,
|
||||||
|
)
|
||||||
|
}),
|
||||||
_ => &self.data().vis.arena[Idx::from_raw(index.0.into())],
|
_ => &self.data().vis.arena[Idx::from_raw(index.0.into())],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -532,7 +532,7 @@ impl Printer<'_> {
|
||||||
|
|
||||||
w!(self, "<");
|
w!(self, "<");
|
||||||
let mut first = true;
|
let mut first = true;
|
||||||
for (idx, lt) in params.lifetimes.iter() {
|
for (idx, lt) in params.iter_lt() {
|
||||||
if !first {
|
if !first {
|
||||||
w!(self, ", ");
|
w!(self, ", ");
|
||||||
}
|
}
|
||||||
|
@ -540,7 +540,7 @@ impl Printer<'_> {
|
||||||
self.print_attrs_of(AttrOwner::LifetimeParamData(parent, idx), " ");
|
self.print_attrs_of(AttrOwner::LifetimeParamData(parent, idx), " ");
|
||||||
w!(self, "{}", lt.name.display(self.db.upcast()));
|
w!(self, "{}", lt.name.display(self.db.upcast()));
|
||||||
}
|
}
|
||||||
for (idx, x) in params.type_or_consts.iter() {
|
for (idx, x) in params.iter_type_or_consts() {
|
||||||
if !first {
|
if !first {
|
||||||
w!(self, ", ");
|
w!(self, ", ");
|
||||||
}
|
}
|
||||||
|
@ -570,13 +570,13 @@ impl Printer<'_> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn print_where_clause(&mut self, params: &GenericParams) -> bool {
|
fn print_where_clause(&mut self, params: &GenericParams) -> bool {
|
||||||
if params.where_predicates.is_empty() {
|
if params.where_predicates().next().is_none() {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
w!(self, "\nwhere");
|
w!(self, "\nwhere");
|
||||||
self.indented(|this| {
|
self.indented(|this| {
|
||||||
for (i, pred) in params.where_predicates.iter().enumerate() {
|
for (i, pred) in params.where_predicates().enumerate() {
|
||||||
if i != 0 {
|
if i != 0 {
|
||||||
wln!(this, ",");
|
wln!(this, ",");
|
||||||
}
|
}
|
||||||
|
@ -607,12 +607,10 @@ impl Printer<'_> {
|
||||||
|
|
||||||
match target {
|
match target {
|
||||||
WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty),
|
WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty),
|
||||||
WherePredicateTypeTarget::TypeOrConstParam(id) => {
|
WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() {
|
||||||
match ¶ms.type_or_consts[*id].name() {
|
Some(name) => w!(this, "{}", name.display(self.db.upcast())),
|
||||||
Some(name) => w!(this, "{}", name.display(self.db.upcast())),
|
None => w!(this, "_anon_{}", id.into_raw()),
|
||||||
None => w!(this, "_anon_{}", id.into_raw()),
|
},
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
w!(this, ": ");
|
w!(this, ": ");
|
||||||
this.print_type_bounds(std::slice::from_ref(bound));
|
this.print_type_bounds(std::slice::from_ref(bound));
|
||||||
|
|
|
@ -7,7 +7,6 @@
|
||||||
//! Note that `hir_def` is a work in progress, so not all of the above is
|
//! Note that `hir_def` is a work in progress, so not all of the above is
|
||||||
//! actually true.
|
//! actually true.
|
||||||
|
|
||||||
#![warn(rust_2018_idioms, unused_lifetimes)]
|
|
||||||
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
|
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
|
||||||
|
|
||||||
#[cfg(feature = "in-rust-tree")]
|
#[cfg(feature = "in-rust-tree")]
|
||||||
|
@ -117,6 +116,8 @@ pub struct ImportPathConfig {
|
||||||
pub prefer_no_std: bool,
|
pub prefer_no_std: bool,
|
||||||
/// If true, prefer import paths containing a prelude module.
|
/// If true, prefer import paths containing a prelude module.
|
||||||
pub prefer_prelude: bool,
|
pub prefer_prelude: bool,
|
||||||
|
/// If true, prefer abs path (starting with `::`) where it is available.
|
||||||
|
pub prefer_absolute: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
@ -689,7 +690,7 @@ pub enum TypeOwnerId {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TypeOwnerId {
|
impl TypeOwnerId {
|
||||||
fn as_generic_def_id(self) -> Option<GenericDefId> {
|
fn as_generic_def_id(self, db: &dyn DefDatabase) -> Option<GenericDefId> {
|
||||||
Some(match self {
|
Some(match self {
|
||||||
TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it),
|
TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it),
|
||||||
TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it),
|
TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it),
|
||||||
|
@ -698,7 +699,9 @@ impl TypeOwnerId {
|
||||||
TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it),
|
TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it),
|
||||||
TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
|
TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
|
||||||
TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
|
TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
|
||||||
TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it),
|
TypeOwnerId::EnumVariantId(it) => {
|
||||||
|
GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent))
|
||||||
|
}
|
||||||
TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None,
|
TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -740,7 +743,6 @@ impl From<GenericDefId> for TypeOwnerId {
|
||||||
GenericDefId::TraitAliasId(it) => it.into(),
|
GenericDefId::TraitAliasId(it) => it.into(),
|
||||||
GenericDefId::TypeAliasId(it) => it.into(),
|
GenericDefId::TypeAliasId(it) => it.into(),
|
||||||
GenericDefId::ImplId(it) => it.into(),
|
GenericDefId::ImplId(it) => it.into(),
|
||||||
GenericDefId::EnumVariantId(it) => it.into(),
|
|
||||||
GenericDefId::ConstId(it) => it.into(),
|
GenericDefId::ConstId(it) => it.into(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -849,8 +851,8 @@ impl GeneralConstId {
|
||||||
pub fn generic_def(self, db: &dyn DefDatabase) -> Option<GenericDefId> {
|
pub fn generic_def(self, db: &dyn DefDatabase) -> Option<GenericDefId> {
|
||||||
match self {
|
match self {
|
||||||
GeneralConstId::ConstId(it) => Some(it.into()),
|
GeneralConstId::ConstId(it) => Some(it.into()),
|
||||||
GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(),
|
GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(db),
|
||||||
GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(),
|
GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(db),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -888,12 +890,12 @@ impl From<EnumVariantId> for DefWithBodyId {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DefWithBodyId {
|
impl DefWithBodyId {
|
||||||
pub fn as_generic_def_id(self) -> Option<GenericDefId> {
|
pub fn as_generic_def_id(self, db: &dyn DefDatabase) -> Option<GenericDefId> {
|
||||||
match self {
|
match self {
|
||||||
DefWithBodyId::FunctionId(f) => Some(f.into()),
|
DefWithBodyId::FunctionId(f) => Some(f.into()),
|
||||||
DefWithBodyId::StaticId(_) => None,
|
DefWithBodyId::StaticId(_) => None,
|
||||||
DefWithBodyId::ConstId(c) => Some(c.into()),
|
DefWithBodyId::ConstId(c) => Some(c.into()),
|
||||||
DefWithBodyId::VariantId(c) => Some(c.into()),
|
DefWithBodyId::VariantId(c) => Some(c.lookup(db).parent.into()),
|
||||||
// FIXME: stable rust doesn't allow generics in constants, but we should
|
// FIXME: stable rust doesn't allow generics in constants, but we should
|
||||||
// use `TypeOwnerId::as_generic_def_id` when it does.
|
// use `TypeOwnerId::as_generic_def_id` when it does.
|
||||||
DefWithBodyId::InTypeConstId(_) => None,
|
DefWithBodyId::InTypeConstId(_) => None,
|
||||||
|
@ -921,10 +923,6 @@ pub enum GenericDefId {
|
||||||
TraitAliasId(TraitAliasId),
|
TraitAliasId(TraitAliasId),
|
||||||
TypeAliasId(TypeAliasId),
|
TypeAliasId(TypeAliasId),
|
||||||
ImplId(ImplId),
|
ImplId(ImplId),
|
||||||
// enum variants cannot have generics themselves, but their parent enums
|
|
||||||
// can, and this makes some code easier to write
|
|
||||||
// FIXME: Try to remove this as that will reduce the amount of query slots generated per enum?
|
|
||||||
EnumVariantId(EnumVariantId),
|
|
||||||
// consts can have type parameters from their parents (i.e. associated consts of traits)
|
// consts can have type parameters from their parents (i.e. associated consts of traits)
|
||||||
ConstId(ConstId),
|
ConstId(ConstId),
|
||||||
}
|
}
|
||||||
|
@ -935,7 +933,6 @@ impl_from!(
|
||||||
TraitAliasId,
|
TraitAliasId,
|
||||||
TypeAliasId,
|
TypeAliasId,
|
||||||
ImplId,
|
ImplId,
|
||||||
EnumVariantId,
|
|
||||||
ConstId
|
ConstId
|
||||||
for GenericDefId
|
for GenericDefId
|
||||||
);
|
);
|
||||||
|
@ -967,7 +964,6 @@ impl GenericDefId {
|
||||||
GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it),
|
GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it),
|
||||||
GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it),
|
GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it),
|
||||||
GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None),
|
GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None),
|
||||||
GenericDefId::EnumVariantId(it) => (it.lookup(db).id.file_id(), None),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -982,6 +978,14 @@ impl GenericDefId {
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn from_callable(db: &dyn DefDatabase, def: CallableDefId) -> GenericDefId {
|
||||||
|
match def {
|
||||||
|
CallableDefId::FunctionId(f) => f.into(),
|
||||||
|
CallableDefId::StructId(s) => s.into(),
|
||||||
|
CallableDefId::EnumVariantId(e) => e.lookup(db).parent.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<AssocItemId> for GenericDefId {
|
impl From<AssocItemId> for GenericDefId {
|
||||||
|
@ -994,6 +998,36 @@ impl From<AssocItemId> for GenericDefId {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||||
|
pub enum CallableDefId {
|
||||||
|
FunctionId(FunctionId),
|
||||||
|
StructId(StructId),
|
||||||
|
EnumVariantId(EnumVariantId),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl InternValueTrivial for CallableDefId {}
|
||||||
|
|
||||||
|
impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
|
||||||
|
impl From<CallableDefId> for ModuleDefId {
|
||||||
|
fn from(def: CallableDefId) -> ModuleDefId {
|
||||||
|
match def {
|
||||||
|
CallableDefId::FunctionId(f) => ModuleDefId::FunctionId(f),
|
||||||
|
CallableDefId::StructId(s) => ModuleDefId::AdtId(AdtId::StructId(s)),
|
||||||
|
CallableDefId::EnumVariantId(e) => ModuleDefId::EnumVariantId(e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CallableDefId {
|
||||||
|
pub fn krate(self, db: &dyn DefDatabase) -> CrateId {
|
||||||
|
match self {
|
||||||
|
CallableDefId::FunctionId(f) => f.krate(db),
|
||||||
|
CallableDefId::StructId(s) => s.krate(db),
|
||||||
|
CallableDefId::EnumVariantId(e) => e.krate(db),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||||
pub enum AttrDefId {
|
pub enum AttrDefId {
|
||||||
ModuleId(ModuleId),
|
ModuleId(ModuleId),
|
||||||
|
@ -1310,7 +1344,6 @@ impl HasModule for GenericDefId {
|
||||||
GenericDefId::TraitAliasId(it) => it.module(db),
|
GenericDefId::TraitAliasId(it) => it.module(db),
|
||||||
GenericDefId::TypeAliasId(it) => it.module(db),
|
GenericDefId::TypeAliasId(it) => it.module(db),
|
||||||
GenericDefId::ImplId(it) => it.module(db),
|
GenericDefId::ImplId(it) => it.module(db),
|
||||||
GenericDefId::EnumVariantId(it) => it.module(db),
|
|
||||||
GenericDefId::ConstId(it) => it.module(db),
|
GenericDefId::ConstId(it) => it.module(db),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,6 +18,26 @@ pub struct LowerCtx<'a> {
|
||||||
span_map: OnceCell<SpanMap>,
|
span_map: OnceCell<SpanMap>,
|
||||||
ast_id_map: OnceCell<Arc<AstIdMap>>,
|
ast_id_map: OnceCell<Arc<AstIdMap>>,
|
||||||
impl_trait_bounds: RefCell<Vec<Vec<Interned<TypeBound>>>>,
|
impl_trait_bounds: RefCell<Vec<Vec<Interned<TypeBound>>>>,
|
||||||
|
// Prevent nested impl traits like `impl Foo<impl Bar>`.
|
||||||
|
outer_impl_trait: RefCell<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) struct OuterImplTraitGuard<'a> {
|
||||||
|
ctx: &'a LowerCtx<'a>,
|
||||||
|
old: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> OuterImplTraitGuard<'a> {
|
||||||
|
fn new(ctx: &'a LowerCtx<'a>, impl_trait: bool) -> Self {
|
||||||
|
let old = ctx.outer_impl_trait.replace(impl_trait);
|
||||||
|
Self { ctx, old }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Drop for OuterImplTraitGuard<'a> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
self.ctx.outer_impl_trait.replace(self.old);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> LowerCtx<'a> {
|
impl<'a> LowerCtx<'a> {
|
||||||
|
@ -28,6 +48,7 @@ impl<'a> LowerCtx<'a> {
|
||||||
span_map: OnceCell::new(),
|
span_map: OnceCell::new(),
|
||||||
ast_id_map: OnceCell::new(),
|
ast_id_map: OnceCell::new(),
|
||||||
impl_trait_bounds: RefCell::new(Vec::new()),
|
impl_trait_bounds: RefCell::new(Vec::new()),
|
||||||
|
outer_impl_trait: RefCell::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -42,6 +63,7 @@ impl<'a> LowerCtx<'a> {
|
||||||
span_map,
|
span_map,
|
||||||
ast_id_map: OnceCell::new(),
|
ast_id_map: OnceCell::new(),
|
||||||
impl_trait_bounds: RefCell::new(Vec::new()),
|
impl_trait_bounds: RefCell::new(Vec::new()),
|
||||||
|
outer_impl_trait: RefCell::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -67,4 +89,12 @@ impl<'a> LowerCtx<'a> {
|
||||||
pub fn take_impl_traits_bounds(&self) -> Vec<Vec<Interned<TypeBound>>> {
|
pub fn take_impl_traits_bounds(&self) -> Vec<Vec<Interned<TypeBound>>> {
|
||||||
self.impl_trait_bounds.take()
|
self.impl_trait_bounds.take()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn outer_impl_trait(&self) -> bool {
|
||||||
|
*self.outer_impl_trait.borrow()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn outer_impl_trait_scope(&'a self, impl_trait: bool) -> OuterImplTraitGuard<'a> {
|
||||||
|
OuterImplTraitGuard::new(self, impl_trait)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -36,6 +36,7 @@ macro_rules! m {
|
||||||
let _ = 'c';
|
let _ = 'c';
|
||||||
let _ = 1000;
|
let _ = 1000;
|
||||||
let _ = 12E+99_f64;
|
let _ = 12E+99_f64;
|
||||||
|
let _ = 45E+1234_f128;
|
||||||
let _ = "rust1";
|
let _ = "rust1";
|
||||||
let _ = -92;
|
let _ = -92;
|
||||||
}
|
}
|
||||||
|
@ -50,6 +51,7 @@ macro_rules! m {
|
||||||
let _ = 'c';
|
let _ = 'c';
|
||||||
let _ = 1000;
|
let _ = 1000;
|
||||||
let _ = 12E+99_f64;
|
let _ = 12E+99_f64;
|
||||||
|
let _ = 45E+1234_f128;
|
||||||
let _ = "rust1";
|
let _ = "rust1";
|
||||||
let _ = -92;
|
let _ = -92;
|
||||||
}
|
}
|
||||||
|
@ -58,6 +60,7 @@ fn f() {
|
||||||
let _ = 'c';
|
let _ = 'c';
|
||||||
let _ = 1000;
|
let _ = 1000;
|
||||||
let _ = 12E+99_f64;
|
let _ = 12E+99_f64;
|
||||||
|
let _ = 45E+1234_f128;
|
||||||
let _ = "rust1";
|
let _ = "rust1";
|
||||||
let _ = -92;
|
let _ = -92;
|
||||||
}
|
}
|
||||||
|
|
|
@ -103,12 +103,13 @@ const PREDEFINED_TOOLS: &[SmolStr] = &[
|
||||||
/// is computed by the `block_def_map` query.
|
/// is computed by the `block_def_map` query.
|
||||||
#[derive(Debug, PartialEq, Eq)]
|
#[derive(Debug, PartialEq, Eq)]
|
||||||
pub struct DefMap {
|
pub struct DefMap {
|
||||||
|
/// The crate this `DefMap` belongs to.
|
||||||
|
krate: CrateId,
|
||||||
/// When this is a block def map, this will hold the block id of the block and module that
|
/// When this is a block def map, this will hold the block id of the block and module that
|
||||||
/// contains this block.
|
/// contains this block.
|
||||||
block: Option<BlockInfo>,
|
block: Option<BlockInfo>,
|
||||||
/// The modules and their data declared in this crate.
|
/// The modules and their data declared in this crate.
|
||||||
pub modules: Arena<ModuleData>,
|
pub modules: Arena<ModuleData>,
|
||||||
krate: CrateId,
|
|
||||||
/// The prelude module for this crate. This either comes from an import
|
/// The prelude module for this crate. This either comes from an import
|
||||||
/// marked with the `prelude_import` attribute, or (in the normal case) from
|
/// marked with the `prelude_import` attribute, or (in the normal case) from
|
||||||
/// a dependency (`std` or `core`).
|
/// a dependency (`std` or `core`).
|
||||||
|
@ -124,6 +125,7 @@ pub struct DefMap {
|
||||||
|
|
||||||
/// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
|
/// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
|
||||||
/// attributes.
|
/// attributes.
|
||||||
|
// FIXME: Figure out a better way for the IDE layer to resolve these?
|
||||||
derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,
|
derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,
|
||||||
|
|
||||||
/// The diagnostics that need to be emitted for this crate.
|
/// The diagnostics that need to be emitted for this crate.
|
||||||
|
|
|
@ -83,7 +83,9 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
|
||||||
let name = Name::new_text_dont_use(it.name.clone());
|
let name = Name::new_text_dont_use(it.name.clone());
|
||||||
(
|
(
|
||||||
name,
|
name,
|
||||||
if it.disabled {
|
if !db.expand_proc_attr_macros() {
|
||||||
|
CustomProcMacroExpander::dummy()
|
||||||
|
} else if it.disabled {
|
||||||
CustomProcMacroExpander::disabled()
|
CustomProcMacroExpander::disabled()
|
||||||
} else {
|
} else {
|
||||||
CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new(
|
CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new(
|
||||||
|
@ -1331,16 +1333,6 @@ impl DefCollector<'_> {
|
||||||
|
|
||||||
let call_id = call_id();
|
let call_id = call_id();
|
||||||
if let MacroDefKind::ProcMacro(_, exp, _) = def.kind {
|
if let MacroDefKind::ProcMacro(_, exp, _) = def.kind {
|
||||||
// If proc attribute macro expansion is disabled, skip expanding it here
|
|
||||||
if !self.db.expand_proc_attr_macros() {
|
|
||||||
self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
|
|
||||||
directive.module_id,
|
|
||||||
self.db.lookup_intern_macro_call(call_id).kind,
|
|
||||||
def.krate,
|
|
||||||
));
|
|
||||||
return recollect_without(self);
|
|
||||||
}
|
|
||||||
|
|
||||||
// If there's no expander for the proc macro (e.g.
|
// If there's no expander for the proc macro (e.g.
|
||||||
// because proc macros are disabled, or building the
|
// because proc macros are disabled, or building the
|
||||||
// proc macro crate failed), report this and skip
|
// proc macro crate failed), report this and skip
|
||||||
|
|
|
@ -17,16 +17,47 @@ use crate::{
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq)]
|
#[derive(Debug, PartialEq, Eq)]
|
||||||
pub enum DefDiagnosticKind {
|
pub enum DefDiagnosticKind {
|
||||||
UnresolvedModule { ast: AstId<ast::Module>, candidates: Box<[String]> },
|
UnresolvedModule {
|
||||||
UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
|
ast: AstId<ast::Module>,
|
||||||
UnresolvedImport { id: ItemTreeId<item_tree::Use>, index: Idx<ast::UseTree> },
|
candidates: Box<[String]>,
|
||||||
UnconfiguredCode { ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
|
},
|
||||||
UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId },
|
UnresolvedExternCrate {
|
||||||
UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
|
ast: AstId<ast::ExternCrate>,
|
||||||
UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
|
},
|
||||||
InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
|
UnresolvedImport {
|
||||||
MalformedDerive { ast: AstId<ast::Adt>, id: usize },
|
id: ItemTreeId<item_tree::Use>,
|
||||||
MacroDefError { ast: AstId<ast::Macro>, message: String },
|
index: Idx<ast::UseTree>,
|
||||||
|
},
|
||||||
|
UnconfiguredCode {
|
||||||
|
ast: ErasedAstId,
|
||||||
|
cfg: CfgExpr,
|
||||||
|
opts: CfgOptions,
|
||||||
|
},
|
||||||
|
/// A proc-macro that is lacking an expander, this might be due to build scripts not yet having
|
||||||
|
/// run or proc-macro expansion being disabled.
|
||||||
|
UnresolvedProcMacro {
|
||||||
|
ast: MacroCallKind,
|
||||||
|
krate: CrateId,
|
||||||
|
},
|
||||||
|
UnresolvedMacroCall {
|
||||||
|
ast: MacroCallKind,
|
||||||
|
path: ModPath,
|
||||||
|
},
|
||||||
|
UnimplementedBuiltinMacro {
|
||||||
|
ast: AstId<ast::Macro>,
|
||||||
|
},
|
||||||
|
InvalidDeriveTarget {
|
||||||
|
ast: AstId<ast::Item>,
|
||||||
|
id: usize,
|
||||||
|
},
|
||||||
|
MalformedDerive {
|
||||||
|
ast: AstId<ast::Adt>,
|
||||||
|
id: usize,
|
||||||
|
},
|
||||||
|
MacroDefError {
|
||||||
|
ast: AstId<ast::Macro>,
|
||||||
|
message: String,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
|
@ -92,10 +123,6 @@ impl DefDiagnostic {
|
||||||
Self { in_module: container, kind: DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } }
|
Self { in_module: container, kind: DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } }
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME: Whats the difference between this and unresolved_macro_call
|
|
||||||
// FIXME: This is used for a lot of things, unresolved proc macros, disabled proc macros, etc
|
|
||||||
// yet the diagnostic handler in ide-diagnostics has to figure out what happened because this
|
|
||||||
// struct loses all that information!
|
|
||||||
pub fn unresolved_proc_macro(
|
pub fn unresolved_proc_macro(
|
||||||
container: LocalModuleId,
|
container: LocalModuleId,
|
||||||
ast: MacroCallKind,
|
ast: MacroCallKind,
|
||||||
|
|
|
@ -9,7 +9,7 @@ use hir_expand::{
|
||||||
name::{name, AsName},
|
name::{name, AsName},
|
||||||
};
|
};
|
||||||
use intern::Interned;
|
use intern::Interned;
|
||||||
use syntax::ast::{self, AstNode, HasTypeBounds};
|
use syntax::ast::{self, AstNode, HasGenericArgs, HasTypeBounds};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
path::{AssociatedTypeBinding, GenericArg, GenericArgs, ModPath, Path, PathKind},
|
path::{AssociatedTypeBinding, GenericArg, GenericArgs, ModPath, Path, PathKind},
|
||||||
|
@ -202,6 +202,8 @@ pub(super) fn lower_generic_args(
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
if let Some(name_ref) = assoc_type_arg.name_ref() {
|
if let Some(name_ref) = assoc_type_arg.name_ref() {
|
||||||
|
// Nested impl traits like `impl Foo<Assoc = impl Bar>` are allowed
|
||||||
|
let _guard = lower_ctx.outer_impl_trait_scope(false);
|
||||||
let name = name_ref.as_name();
|
let name = name_ref.as_name();
|
||||||
let args = assoc_type_arg
|
let args = assoc_type_arg
|
||||||
.generic_arg_list()
|
.generic_arg_list()
|
||||||
|
|
|
@ -596,7 +596,7 @@ impl Resolver {
|
||||||
Scope::GenericParams { params, def } => Some((params, def)),
|
Scope::GenericParams { params, def } => Some((params, def)),
|
||||||
_ => None,
|
_ => None,
|
||||||
})
|
})
|
||||||
.flat_map(|(params, def)| params.where_predicates.iter().zip(iter::repeat(def)))
|
.flat_map(|(params, def)| params.where_predicates().zip(iter::repeat(def)))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn generic_def(&self) -> Option<GenericDefId> {
|
pub fn generic_def(&self) -> Option<GenericDefId> {
|
||||||
|
@ -758,10 +758,10 @@ impl Scope {
|
||||||
}
|
}
|
||||||
Scope::GenericParams { params, def: parent } => {
|
Scope::GenericParams { params, def: parent } => {
|
||||||
let parent = *parent;
|
let parent = *parent;
|
||||||
for (local_id, param) in params.type_or_consts.iter() {
|
for (local_id, param) in params.iter_type_or_consts() {
|
||||||
if let Some(name) = ¶m.name() {
|
if let Some(name) = ¶m.name() {
|
||||||
let id = TypeOrConstParamId { parent, local_id };
|
let id = TypeOrConstParamId { parent, local_id };
|
||||||
let data = &db.generic_params(parent).type_or_consts[local_id];
|
let data = &db.generic_params(parent)[local_id];
|
||||||
acc.add(
|
acc.add(
|
||||||
name,
|
name,
|
||||||
ScopeDef::GenericParam(match data {
|
ScopeDef::GenericParam(match data {
|
||||||
|
@ -775,7 +775,7 @@ impl Scope {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for (local_id, param) in params.lifetimes.iter() {
|
for (local_id, param) in params.iter_lt() {
|
||||||
let id = LifetimeParamId { parent, local_id };
|
let id = LifetimeParamId { parent, local_id };
|
||||||
acc.add(¶m.name, ScopeDef::GenericParam(id.into()))
|
acc.add(¶m.name, ScopeDef::GenericParam(id.into()))
|
||||||
}
|
}
|
||||||
|
@ -1164,7 +1164,6 @@ impl HasResolver for GenericDefId {
|
||||||
GenericDefId::TraitAliasId(inner) => inner.resolver(db),
|
GenericDefId::TraitAliasId(inner) => inner.resolver(db),
|
||||||
GenericDefId::TypeAliasId(inner) => inner.resolver(db),
|
GenericDefId::TypeAliasId(inner) => inner.resolver(db),
|
||||||
GenericDefId::ImplId(inner) => inner.resolver(db),
|
GenericDefId::ImplId(inner) => inner.resolver(db),
|
||||||
GenericDefId::EnumVariantId(inner) => inner.resolver(db),
|
|
||||||
GenericDefId::ConstId(inner) => inner.resolver(db),
|
GenericDefId::ConstId(inner) => inner.resolver(db),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -64,7 +64,7 @@ impl HasChildSource<LocalTypeOrConstParamId> for GenericDefId {
|
||||||
db: &dyn DefDatabase,
|
db: &dyn DefDatabase,
|
||||||
) -> InFile<ArenaMap<LocalTypeOrConstParamId, Self::Value>> {
|
) -> InFile<ArenaMap<LocalTypeOrConstParamId, Self::Value>> {
|
||||||
let generic_params = db.generic_params(*self);
|
let generic_params = db.generic_params(*self);
|
||||||
let mut idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx);
|
let mut idx_iter = generic_params.iter_type_or_consts().map(|(idx, _)| idx);
|
||||||
|
|
||||||
let (file_id, generic_params_list) = self.file_id_and_params_of(db);
|
let (file_id, generic_params_list) = self.file_id_and_params_of(db);
|
||||||
|
|
||||||
|
@ -103,7 +103,7 @@ impl HasChildSource<LocalLifetimeParamId> for GenericDefId {
|
||||||
db: &dyn DefDatabase,
|
db: &dyn DefDatabase,
|
||||||
) -> InFile<ArenaMap<LocalLifetimeParamId, Self::Value>> {
|
) -> InFile<ArenaMap<LocalLifetimeParamId, Self::Value>> {
|
||||||
let generic_params = db.generic_params(*self);
|
let generic_params = db.generic_params(*self);
|
||||||
let idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx);
|
let idx_iter = generic_params.iter_lt().map(|(idx, _)| idx);
|
||||||
|
|
||||||
let (file_id, generic_params_list) = self.file_id_and_params_of(db);
|
let (file_id, generic_params_list) = self.file_id_and_params_of(db);
|
||||||
|
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
|
|
||||||
use std::iter;
|
use std::iter;
|
||||||
|
|
||||||
|
use intern::Interned;
|
||||||
use la_arena::ArenaMap;
|
use la_arena::ArenaMap;
|
||||||
use span::SyntaxContextId;
|
use span::SyntaxContextId;
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
|
@ -20,14 +21,17 @@ use crate::{
|
||||||
pub enum RawVisibility {
|
pub enum RawVisibility {
|
||||||
/// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is
|
/// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is
|
||||||
/// equivalent to `pub(self)`.
|
/// equivalent to `pub(self)`.
|
||||||
Module(ModPath, VisibilityExplicitness),
|
Module(Interned<ModPath>, VisibilityExplicitness),
|
||||||
/// `pub`.
|
/// `pub`.
|
||||||
Public,
|
Public,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RawVisibility {
|
impl RawVisibility {
|
||||||
pub(crate) const fn private() -> RawVisibility {
|
pub(crate) fn private() -> RawVisibility {
|
||||||
RawVisibility::Module(ModPath::from_kind(PathKind::SELF), VisibilityExplicitness::Implicit)
|
RawVisibility::Module(
|
||||||
|
Interned::new(ModPath::from_kind(PathKind::SELF)),
|
||||||
|
VisibilityExplicitness::Implicit,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn from_ast(
|
pub(crate) fn from_ast(
|
||||||
|
@ -60,7 +64,7 @@ impl RawVisibility {
|
||||||
ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::SELF),
|
ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::SELF),
|
||||||
ast::VisibilityKind::Pub => return RawVisibility::Public,
|
ast::VisibilityKind::Pub => return RawVisibility::Public,
|
||||||
};
|
};
|
||||||
RawVisibility::Module(path, VisibilityExplicitness::Explicit)
|
RawVisibility::Module(Interned::new(path), VisibilityExplicitness::Explicit)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn resolve(
|
pub fn resolve(
|
||||||
|
|
|
@ -25,7 +25,8 @@ impl ChangeWithProcMacros {
|
||||||
|
|
||||||
pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) {
|
pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) {
|
||||||
self.source_change.apply(db);
|
self.source_change.apply(db);
|
||||||
if let Some(proc_macros) = self.proc_macros {
|
if let Some(mut proc_macros) = self.proc_macros {
|
||||||
|
proc_macros.shrink_to_fit();
|
||||||
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
|
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
|
||||||
}
|
}
|
||||||
if let Some(target_data_layouts) = self.target_data_layouts {
|
if let Some(target_data_layouts) = self.target_data_layouts {
|
||||||
|
|
|
@ -172,15 +172,30 @@ impl DeclarativeMacroExpander {
|
||||||
),
|
),
|
||||||
ast::Macro::MacroDef(macro_def) => (
|
ast::Macro::MacroDef(macro_def) => (
|
||||||
match macro_def.body() {
|
match macro_def.body() {
|
||||||
Some(arg) => {
|
Some(body) => {
|
||||||
let tt = mbe::syntax_node_to_token_tree(
|
let span =
|
||||||
arg.syntax(),
|
map.span_for_range(macro_def.macro_token().unwrap().text_range());
|
||||||
|
let args = macro_def.args().map(|args| {
|
||||||
|
mbe::syntax_node_to_token_tree(
|
||||||
|
args.syntax(),
|
||||||
|
map.as_ref(),
|
||||||
|
span,
|
||||||
|
DocCommentDesugarMode::Mbe,
|
||||||
|
)
|
||||||
|
});
|
||||||
|
let body = mbe::syntax_node_to_token_tree(
|
||||||
|
body.syntax(),
|
||||||
map.as_ref(),
|
map.as_ref(),
|
||||||
map.span_for_range(macro_def.macro_token().unwrap().text_range()),
|
span,
|
||||||
DocCommentDesugarMode::Mbe,
|
DocCommentDesugarMode::Mbe,
|
||||||
);
|
);
|
||||||
|
|
||||||
mbe::DeclarativeMacro::parse_macro2(&tt, edition, new_meta_vars)
|
mbe::DeclarativeMacro::parse_macro2(
|
||||||
|
args.as_ref(),
|
||||||
|
&body,
|
||||||
|
edition,
|
||||||
|
new_meta_vars,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected(
|
None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected(
|
||||||
"expected a token tree".into(),
|
"expected a token tree".into(),
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
//! Things to wrap other things in file ids.
|
//! Things to wrap other things in file ids.
|
||||||
|
use std::borrow::Borrow;
|
||||||
|
|
||||||
use either::Either;
|
use either::Either;
|
||||||
use span::{
|
use span::{
|
||||||
AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
|
AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
|
||||||
|
@ -76,6 +78,13 @@ impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
|
||||||
pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
|
pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
|
||||||
self.with_value(&self.value)
|
self.with_value(&self.value)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn borrow<U>(&self) -> InFileWrapper<FileKind, &U>
|
||||||
|
where
|
||||||
|
T: Borrow<U>,
|
||||||
|
{
|
||||||
|
self.with_value(self.value.borrow())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
|
impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
|
||||||
|
@ -156,14 +165,61 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// region:specific impls
|
// region:specific impls
|
||||||
|
impl<SN: Borrow<SyntaxNode>> InRealFile<SN> {
|
||||||
|
pub fn file_range(&self) -> FileRange {
|
||||||
|
FileRange { file_id: self.file_id, range: self.value.borrow().text_range() }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<SN: Borrow<SyntaxNode>> InFile<SN> {
|
||||||
|
pub fn parent_ancestors_with_macros(
|
||||||
|
self,
|
||||||
|
db: &dyn db::ExpandDatabase,
|
||||||
|
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
|
||||||
|
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
|
||||||
|
Some(parent) => Some(node.with_value(parent)),
|
||||||
|
None => db
|
||||||
|
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
|
||||||
|
.to_node_item(db)
|
||||||
|
.syntax()
|
||||||
|
.cloned()
|
||||||
|
.map(|node| node.parent())
|
||||||
|
.transpose(),
|
||||||
|
};
|
||||||
|
std::iter::successors(succ(&self.borrow().cloned()), succ)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn ancestors_with_macros(
|
||||||
|
self,
|
||||||
|
db: &dyn db::ExpandDatabase,
|
||||||
|
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
|
||||||
|
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
|
||||||
|
Some(parent) => Some(node.with_value(parent)),
|
||||||
|
None => db
|
||||||
|
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
|
||||||
|
.to_node_item(db)
|
||||||
|
.syntax()
|
||||||
|
.cloned()
|
||||||
|
.map(|node| node.parent())
|
||||||
|
.transpose(),
|
||||||
|
};
|
||||||
|
std::iter::successors(Some(self.borrow().cloned()), succ)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn kind(&self) -> parser::SyntaxKind {
|
||||||
|
self.value.borrow().kind()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn text_range(&self) -> TextRange {
|
||||||
|
self.value.borrow().text_range()
|
||||||
|
}
|
||||||
|
|
||||||
impl InFile<&SyntaxNode> {
|
|
||||||
/// Falls back to the macro call range if the node cannot be mapped up fully.
|
/// Falls back to the macro call range if the node cannot be mapped up fully.
|
||||||
///
|
///
|
||||||
/// For attributes and derives, this will point back to the attribute only.
|
/// For attributes and derives, this will point back to the attribute only.
|
||||||
/// For the entire item use [`InFile::original_file_range_full`].
|
/// For the entire item use [`InFile::original_file_range_full`].
|
||||||
pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
|
pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
|
||||||
self.map(SyntaxNode::text_range).original_node_file_range_rooted(db)
|
self.borrow().map(SyntaxNode::text_range).original_node_file_range_rooted(db)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Falls back to the macro call range if the node cannot be mapped up fully.
|
/// Falls back to the macro call range if the node cannot be mapped up fully.
|
||||||
|
@ -171,15 +227,7 @@ impl InFile<&SyntaxNode> {
|
||||||
self,
|
self,
|
||||||
db: &dyn db::ExpandDatabase,
|
db: &dyn db::ExpandDatabase,
|
||||||
) -> FileRange {
|
) -> FileRange {
|
||||||
self.map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
|
self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
|
||||||
}
|
|
||||||
|
|
||||||
/// Attempts to map the syntax node back up its macro calls.
|
|
||||||
pub fn original_file_range_opt(
|
|
||||||
self,
|
|
||||||
db: &dyn db::ExpandDatabase,
|
|
||||||
) -> Option<(FileRange, SyntaxContextId)> {
|
|
||||||
self.map(SyntaxNode::text_range).original_node_file_range_opt(db)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn original_syntax_node_rooted(
|
pub fn original_syntax_node_rooted(
|
||||||
|
@ -190,16 +238,19 @@ impl InFile<&SyntaxNode> {
|
||||||
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
|
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
|
||||||
let file_id = match self.file_id.repr() {
|
let file_id = match self.file_id.repr() {
|
||||||
HirFileIdRepr::FileId(file_id) => {
|
HirFileIdRepr::FileId(file_id) => {
|
||||||
return Some(InRealFile { file_id, value: self.value.clone() })
|
return Some(InRealFile { file_id, value: self.value.borrow().clone() })
|
||||||
}
|
}
|
||||||
HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m,
|
HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m,
|
||||||
_ => return None,
|
_ => return None,
|
||||||
};
|
};
|
||||||
|
|
||||||
let FileRange { file_id, range } =
|
let FileRange { file_id, range } = map_node_range_up_rooted(
|
||||||
map_node_range_up_rooted(db, &db.expansion_span_map(file_id), self.value.text_range())?;
|
db,
|
||||||
|
&db.expansion_span_map(file_id),
|
||||||
|
self.value.borrow().text_range(),
|
||||||
|
)?;
|
||||||
|
|
||||||
let kind = self.value.kind();
|
let kind = self.kind();
|
||||||
let value = db
|
let value = db
|
||||||
.parse(file_id)
|
.parse(file_id)
|
||||||
.syntax_node()
|
.syntax_node()
|
||||||
|
@ -211,6 +262,16 @@ impl InFile<&SyntaxNode> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl InFile<&SyntaxNode> {
|
||||||
|
/// Attempts to map the syntax node back up its macro calls.
|
||||||
|
pub fn original_file_range_opt(
|
||||||
|
self,
|
||||||
|
db: &dyn db::ExpandDatabase,
|
||||||
|
) -> Option<(FileRange, SyntaxContextId)> {
|
||||||
|
self.borrow().map(SyntaxNode::text_range).original_node_file_range_opt(db)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl InMacroFile<SyntaxToken> {
|
impl InMacroFile<SyntaxToken> {
|
||||||
pub fn upmap_once(
|
pub fn upmap_once(
|
||||||
self,
|
self,
|
||||||
|
|
|
@ -4,7 +4,6 @@
|
||||||
//! tree originates not from the text of some `FileId`, but from some macro
|
//! tree originates not from the text of some `FileId`, but from some macro
|
||||||
//! expansion.
|
//! expansion.
|
||||||
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
|
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
|
||||||
#![warn(rust_2018_idioms, unused_lifetimes)]
|
|
||||||
|
|
||||||
pub mod attrs;
|
pub mod attrs;
|
||||||
pub mod builtin_attr_macro;
|
pub mod builtin_attr_macro;
|
||||||
|
|
|
@ -275,8 +275,10 @@ pub mod known {
|
||||||
u32,
|
u32,
|
||||||
u64,
|
u64,
|
||||||
u128,
|
u128,
|
||||||
|
f16,
|
||||||
f32,
|
f32,
|
||||||
f64,
|
f64,
|
||||||
|
f128,
|
||||||
bool,
|
bool,
|
||||||
char,
|
char,
|
||||||
str,
|
str,
|
||||||
|
|
|
@ -33,6 +33,7 @@ triomphe.workspace = true
|
||||||
nohash-hasher.workspace = true
|
nohash-hasher.workspace = true
|
||||||
typed-arena = "2.0.1"
|
typed-arena = "2.0.1"
|
||||||
indexmap.workspace = true
|
indexmap.workspace = true
|
||||||
|
rustc_apfloat = "0.2.0"
|
||||||
|
|
||||||
ra-ap-rustc_abi.workspace = true
|
ra-ap-rustc_abi.workspace = true
|
||||||
ra-ap-rustc_index.workspace = true
|
ra-ap-rustc_index.workspace = true
|
||||||
|
|
|
@ -63,7 +63,14 @@ impl<D> TyBuilder<D> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_internal(self) -> (D, Substitution) {
|
fn build_internal(self) -> (D, Substitution) {
|
||||||
assert_eq!(self.vec.len(), self.param_kinds.len(), "{:?}", &self.param_kinds);
|
assert_eq!(
|
||||||
|
self.vec.len(),
|
||||||
|
self.param_kinds.len(),
|
||||||
|
"{} args received, {} expected ({:?})",
|
||||||
|
self.vec.len(),
|
||||||
|
self.param_kinds.len(),
|
||||||
|
&self.param_kinds
|
||||||
|
);
|
||||||
for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) {
|
for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) {
|
||||||
self.assert_match_kind(a, e);
|
self.assert_match_kind(a, e);
|
||||||
}
|
}
|
||||||
|
@ -252,8 +259,9 @@ impl TyBuilder<()> {
|
||||||
/// This method prepopulates the builder with placeholder substitution of `parent`, so you
|
/// This method prepopulates the builder with placeholder substitution of `parent`, so you
|
||||||
/// should only push exactly 3 `GenericArg`s before building.
|
/// should only push exactly 3 `GenericArg`s before building.
|
||||||
pub fn subst_for_coroutine(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> {
|
pub fn subst_for_coroutine(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> {
|
||||||
let parent_subst =
|
let parent_subst = parent
|
||||||
parent.as_generic_def_id().map(|p| generics(db.upcast(), p).placeholder_subst(db));
|
.as_generic_def_id(db.upcast())
|
||||||
|
.map(|p| generics(db.upcast(), p).placeholder_subst(db));
|
||||||
// These represent resume type, yield type, and return type of coroutine.
|
// These represent resume type, yield type, and return type of coroutine.
|
||||||
let params = std::iter::repeat(ParamKind::Type).take(3).collect();
|
let params = std::iter::repeat(ParamKind::Type).take(3).collect();
|
||||||
TyBuilder::new((), params, parent_subst)
|
TyBuilder::new((), params, parent_subst)
|
||||||
|
@ -266,7 +274,7 @@ impl TyBuilder<()> {
|
||||||
) -> Substitution {
|
) -> Substitution {
|
||||||
let sig_ty = sig_ty.cast(Interner);
|
let sig_ty = sig_ty.cast(Interner);
|
||||||
let self_subst = iter::once(&sig_ty);
|
let self_subst = iter::once(&sig_ty);
|
||||||
let Some(parent) = parent.as_generic_def_id() else {
|
let Some(parent) = parent.as_generic_def_id(db.upcast()) else {
|
||||||
return Substitution::from_iter(Interner, self_subst);
|
return Substitution::from_iter(Interner, self_subst);
|
||||||
};
|
};
|
||||||
Substitution::from_iter(
|
Substitution::from_iter(
|
||||||
|
@ -296,7 +304,8 @@ impl TyBuilder<hir_def::AdtId> {
|
||||||
) -> Self {
|
) -> Self {
|
||||||
// Note that we're building ADT, so we never have parent generic parameters.
|
// Note that we're building ADT, so we never have parent generic parameters.
|
||||||
let defaults = db.generic_defaults(self.data.into());
|
let defaults = db.generic_defaults(self.data.into());
|
||||||
for default_ty in defaults.iter().skip(self.vec.len()) {
|
|
||||||
|
for default_ty in &defaults[self.vec.len()..] {
|
||||||
// NOTE(skip_binders): we only check if the arg type is error type.
|
// NOTE(skip_binders): we only check if the arg type is error type.
|
||||||
if let Some(x) = default_ty.skip_binders().ty(Interner) {
|
if let Some(x) = default_ty.skip_binders().ty(Interner) {
|
||||||
if x.is_unknown() {
|
if x.is_unknown() {
|
||||||
|
|
|
@ -13,7 +13,8 @@ use hir_def::{
|
||||||
data::adt::StructFlags,
|
data::adt::StructFlags,
|
||||||
hir::Movability,
|
hir::Movability,
|
||||||
lang_item::{LangItem, LangItemTarget},
|
lang_item::{LangItem, LangItemTarget},
|
||||||
AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, VariantId,
|
AssocItemId, BlockId, CallableDefId, GenericDefId, HasModule, ItemContainerId, Lookup,
|
||||||
|
TypeAliasId, VariantId,
|
||||||
};
|
};
|
||||||
use hir_expand::name::name;
|
use hir_expand::name::name;
|
||||||
|
|
||||||
|
@@ -28,9 +29,9 @@ use crate::{
 to_assoc_type_id, to_chalk_trait_id,
 traits::ChalkContext,
 utils::ClosureSubst,
-wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId,
-Interner, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef,
-TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause,
+wrap_empty_binders, AliasEq, AliasTy, BoundVar, DebruijnIndex, FnDefId, Interner, ProjectionTy,
+ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder,
+TyExt, TyKind, WhereClause,
 };

 pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
@@ -102,7 +103,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
 &self,
 fn_def_id: chalk_ir::FnDefId<Interner>,
 ) -> Arc<rust_ir::FnDefDatum<Interner>> {
-self.db.fn_def_datum(self.krate, fn_def_id)
+self.db.fn_def_datum(fn_def_id)
 }

 fn impls_for_trait(
@@ -912,16 +913,13 @@ fn type_alias_associated_ty_value(
 Arc::new(value)
 }

-pub(crate) fn fn_def_datum_query(
-db: &dyn HirDatabase,
-_krate: CrateId,
-fn_def_id: FnDefId,
-) -> Arc<FnDefDatum> {
+pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Arc<FnDefDatum> {
 let callable_def: CallableDefId = from_chalk(db, fn_def_id);
-let generic_params = generics(db.upcast(), callable_def.into());
+let generic_def = GenericDefId::from_callable(db.upcast(), callable_def);
+let generic_params = generics(db.upcast(), generic_def);
 let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders();
 let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
-let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars);
+let where_clauses = convert_where_clauses(db, generic_def, &bound_vars);
 let bound = rust_ir::FnDefDatumBound {
 // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway
 inputs_and_output: chalk_ir::Binders::empty(
@@ -948,7 +946,8 @@ pub(crate) fn fn_def_datum_query(

 pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances {
 let callable_def: CallableDefId = from_chalk(db, fn_def_id);
-let generic_params = generics(db.upcast(), callable_def.into());
+let generic_params =
+generics(db.upcast(), GenericDefId::from_callable(db.upcast(), callable_def));
 Variances::from_iter(
 Interner,
 std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()),
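The hunks above drop the unused `krate` argument from `fn_def_datum_query` and replace the old `callable_def.into()` conversion with an explicit `GenericDefId::from_callable(db.upcast(), callable_def)` call. The sketch below is a minimal stand-in for why such a conversion wants a database handle; every type in it is invented for illustration (not the real hir-def definitions), and the motivation shown (mapping an enum variant to its parent enum) is a reading of this diff rather than something it states.

```rust
use std::collections::HashMap;

// Invented stand-ins; the real ids live in hir-def behind a salsa database.
#[derive(Clone, Copy)]
struct FunctionId(u32);
#[derive(Clone, Copy)]
struct EnumVariantId(u32);
#[derive(Clone, Copy, Debug)]
struct EnumId(u32);

#[derive(Clone, Copy)]
enum CallableDefId {
    FunctionId(FunctionId),
    EnumVariantId(EnumVariantId),
}

#[derive(Clone, Copy, Debug)]
enum GenericDefId {
    FunctionId(u32),
    AdtId(EnumId),
}

// Stand-in for the lookup the real code performs through the database.
struct Db {
    variant_parent: HashMap<u32, EnumId>,
}

impl GenericDefId {
    // A plain `From<CallableDefId>` impl has no way to find a variant's parent
    // enum, so the conversion becomes a constructor that takes the database.
    fn from_callable(db: &Db, def: CallableDefId) -> GenericDefId {
        match def {
            CallableDefId::FunctionId(FunctionId(it)) => GenericDefId::FunctionId(it),
            CallableDefId::EnumVariantId(EnumVariantId(v)) => {
                GenericDefId::AdtId(db.variant_parent[&v])
            }
        }
    }
}

fn main() {
    let db = Db { variant_parent: HashMap::from([(7, EnumId(1))]) };
    let generic_def =
        GenericDefId::from_callable(&db, CallableDefId::EnumVariantId(EnumVariantId(7)));
    println!("{generic_def:?}"); // AdtId(EnumId(1))
}
```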
@@ -119,8 +119,10 @@ impl TyExt for Ty {
 TyKind::Scalar(Scalar::Bool) => Some(BuiltinType::Bool),
 TyKind::Scalar(Scalar::Char) => Some(BuiltinType::Char),
 TyKind::Scalar(Scalar::Float(fty)) => Some(BuiltinType::Float(match fty {
+FloatTy::F128 => BuiltinFloat::F128,
 FloatTy::F64 => BuiltinFloat::F64,
 FloatTy::F32 => BuiltinFloat::F32,
+FloatTy::F16 => BuiltinFloat::F16,
 })),
 TyKind::Scalar(Scalar::Int(ity)) => Some(BuiltinType::Int(match ity {
 IntTy::Isize => BuiltinInt::Isize,
@@ -188,9 +190,10 @@ impl TyExt for Ty {
 fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId> {
 match *self.kind(Interner) {
 TyKind::Adt(AdtId(adt), ..) => Some(adt.into()),
-TyKind::FnDef(callable, ..) => {
-Some(db.lookup_intern_callable_def(callable.into()).into())
-}
+TyKind::FnDef(callable, ..) => Some(GenericDefId::from_callable(
+db.upcast(),
+db.lookup_intern_callable_def(callable.into()),
+)),
 TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()),
 TyKind::Foreign(type_alias, ..) => Some(from_foreign_def_id(type_alias).into()),
 _ => None,
@@ -308,7 +311,7 @@ impl TyExt for Ty {
 TyKind::Placeholder(idx) => {
 let id = from_placeholder_idx(db, *idx);
 let generic_params = db.generic_params(id.parent);
-let param_data = &generic_params.type_or_consts[id.local_id];
+let param_data = &generic_params[id.local_id];
 match param_data {
 TypeOrConstParamData::TypeParamData(p) => match p.provenance {
 hir_def::generics::TypeParamProvenance::ArgumentImplTrait => {
@@ -1,6 +1,10 @@
 use base_db::FileId;
 use chalk_ir::Substitution;
 use hir_def::db::DefDatabase;
+use rustc_apfloat::{
+ieee::{Half as f16, Quad as f128},
+Float,
+};
 use test_fixture::WithFixture;
 use test_utils::skip_slow_tests;

@@ -140,6 +144,14 @@ fn bit_op() {

 #[test]
 fn floating_point() {
+check_number(
+r#"const GOAL: f128 = 2.0 + 3.0 * 5.5 - 8.;"#,
+"10.5".parse::<f128>().unwrap().to_bits() as i128,
+);
+check_number(
+r#"const GOAL: f128 = -90.0 + 36.0;"#,
+"-54.0".parse::<f128>().unwrap().to_bits() as i128,
+);
 check_number(
 r#"const GOAL: f64 = 2.0 + 3.0 * 5.5 - 8.;"#,
 i128::from_le_bytes(pad16(&f64::to_le_bytes(10.5), true)),
@@ -152,6 +164,20 @@ fn floating_point() {
 r#"const GOAL: f32 = -90.0 + 36.0;"#,
 i128::from_le_bytes(pad16(&f32::to_le_bytes(-54.0), true)),
 );
+check_number(
+r#"const GOAL: f16 = 2.0 + 3.0 * 5.5 - 8.;"#,
+i128::from_le_bytes(pad16(
+&u16::try_from("10.5".parse::<f16>().unwrap().to_bits()).unwrap().to_le_bytes(),
+true,
+)),
+);
+check_number(
+r#"const GOAL: f16 = -90.0 + 36.0;"#,
+i128::from_le_bytes(pad16(
+&u16::try_from("-54.0".parse::<f16>().unwrap().to_bits()).unwrap().to_le_bytes(),
+true,
+)),
+);
 }

 #[test]
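The new `f16`/`f128` const-eval tests above build their expected bit patterns with rustc_apfloat's software floats rather than native types (which are not stable yet). A standalone sketch of that pattern, assuming the `rustc_apfloat` crate as a dependency:

```rust
// Sketch: compute the IEEE binary16 bit pattern of a literal the same way the
// tests above do, via rustc_apfloat's software `Half` type.
use rustc_apfloat::{ieee::Half, Float};

fn main() {
    let half: Half = "10.5".parse().unwrap();
    // `to_bits` returns the payload widened to u128; binary16 fits in 16 bits.
    let bits = u16::try_from(half.to_bits()).unwrap();
    assert_eq!(bits, 0x4940); // 10.5 == sign 0, exponent 10010, mantissa 0101000000
    println!("{bits:#06x}");
}
```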
@@ -411,6 +411,7 @@ fn likely() {

 #[test]
 fn floating_point() {
+// FIXME(#17451): Add `f16` and `f128` tests once intrinsics are added.
 check_number(
 r#"
 extern "rust-intrinsic" {
@@ -426,6 +427,7 @@ fn floating_point() {
 true,
 )),
 );
+#[allow(unknown_lints, clippy::unnecessary_min_or_max)]
 check_number(
 r#"
 extern "rust-intrinsic" {
@@ -9,8 +9,8 @@ use base_db::{
 CrateId, Upcast,
 };
 use hir_def::{
-db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId,
-DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
+db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, CallableDefId,
+ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
 LifetimeParamId, LocalFieldId, StaticId, TypeAliasId, TypeOrConstParamId, VariantId,
 };
 use la_arena::ArenaMap;
@@ -24,9 +24,8 @@ use crate::{
 lower::{GenericDefaults, GenericPredicates},
 method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
 mir::{BorrowckResult, MirBody, MirLowerError},
-Binders, CallableDefId, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult,
-Interner, PolyFnSig, QuantifiedWhereClause, Substitution, TraitEnvironment, TraitRef, Ty,
-TyDefId, ValueTyDefId,
+Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner,
+PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
 };
 use hir_expand::name::Name;

@@ -81,8 +80,32 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 #[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
 fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;

+#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
+fn lookup_impl_method(
+&self,
+env: Arc<TraitEnvironment>,
+func: FunctionId,
+fn_subst: Substitution,
+) -> (FunctionId, Substitution);
+
 // endregion:mir

+#[salsa::invoke(crate::layout::layout_of_adt_query)]
+#[salsa::cycle(crate::layout::layout_of_adt_recover)]
+fn layout_of_adt(
+&self,
+def: AdtId,
+subst: Substitution,
+env: Arc<TraitEnvironment>,
+) -> Result<Arc<Layout>, LayoutError>;
+
+#[salsa::invoke(crate::layout::layout_of_ty_query)]
+#[salsa::cycle(crate::layout::layout_of_ty_recover)]
+fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
+
+#[salsa::invoke(crate::layout::target_data_layout_query)]
+fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
+
 #[salsa::invoke(crate::lower::ty_query)]
 #[salsa::cycle(crate::lower::ty_recover)]
 fn ty(&self, def: TyDefId) -> Binders<Ty>;
@@ -105,30 +128,6 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 #[salsa::invoke(crate::lower::field_types_query)]
 fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;

-#[salsa::invoke(crate::layout::layout_of_adt_query)]
-#[salsa::cycle(crate::layout::layout_of_adt_recover)]
-fn layout_of_adt(
-&self,
-def: AdtId,
-subst: Substitution,
-env: Arc<TraitEnvironment>,
-) -> Result<Arc<Layout>, LayoutError>;
-
-#[salsa::invoke(crate::layout::layout_of_ty_query)]
-#[salsa::cycle(crate::layout::layout_of_ty_recover)]
-fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
-
-#[salsa::invoke(crate::layout::target_data_layout_query)]
-fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
-
-#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
-fn lookup_impl_method(
-&self,
-env: Arc<TraitEnvironment>,
-func: FunctionId,
-fn_subst: Substitution,
-) -> (FunctionId, Substitution);
-
 #[salsa::invoke(crate::lower::callable_item_sig)]
 fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;

@@ -145,7 +144,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 def: GenericDefId,
 param_id: TypeOrConstParamId,
 assoc_name: Option<Name>,
-) -> Arc<[Binders<QuantifiedWhereClause>]>;
+) -> GenericPredicates;

 #[salsa::invoke(crate::lower::generic_predicates_query)]
 fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
@@ -232,7 +231,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 ) -> sync::Arc<chalk_db::ImplDatum>;

 #[salsa::invoke(chalk_db::fn_def_datum_query)]
-fn fn_def_datum(&self, krate: CrateId, fn_def_id: FnDefId) -> sync::Arc<chalk_db::FnDefDatum>;
+fn fn_def_datum(&self, fn_def_id: FnDefId) -> sync::Arc<chalk_db::FnDefDatum>;

 #[salsa::invoke(chalk_db::fn_def_variance_query)]
 fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances;
@@ -196,6 +196,9 @@ impl ExprValidator {
 let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) else {
 return;
 };
+if pat_ty.contains_unknown() {
+return;
+}

 // We only include patterns whose type matches the type
 // of the scrutinee expression. If we had an InvalidMatchArmPattern
@@ -51,6 +51,7 @@ pub(crate) struct Pat {
 #[derive(Clone, Debug, PartialEq)]
 pub(crate) enum PatKind {
 Wild,
+Never,

 /// `x`, `ref x`, `x @ P`, etc.
 Binding {
@@ -294,6 +295,7 @@ impl HirDisplay for Pat {
 fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
 match &*self.kind {
 PatKind::Wild => write!(f, "_"),
+PatKind::Never => write!(f, "!"),
 PatKind::Binding { name, subpattern } => {
 write!(f, "{}", name.display(f.db.upcast()))?;
 if let Some(subpattern) = subpattern {
@@ -4,12 +4,10 @@ use std::fmt;

 use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
 use once_cell::unsync::Lazy;
-use rustc_hash::FxHashMap;
 use rustc_pattern_analysis::{
 constructor::{Constructor, ConstructorSet, VariantVisibility},
-index::IdxContainer,
 usefulness::{compute_match_usefulness, PlaceValidity, UsefulnessReport},
-Captures, PatCx, PrivateUninhabitedField,
+Captures, IndexVec, PatCx, PrivateUninhabitedField,
 };
 use smallvec::{smallvec, SmallVec};
 use stdx::never;
@@ -26,10 +24,10 @@ use super::{is_box, FieldPat, Pat, PatKind};
 use Constructor::*;

 // Re-export r-a-specific versions of all these types.
-pub(crate) type DeconstructedPat<'p> =
-rustc_pattern_analysis::pat::DeconstructedPat<MatchCheckCtx<'p>>;
-pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>;
-pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat<MatchCheckCtx<'p>>;
+pub(crate) type DeconstructedPat<'db> =
+rustc_pattern_analysis::pat::DeconstructedPat<MatchCheckCtx<'db>>;
+pub(crate) type MatchArm<'db> = rustc_pattern_analysis::MatchArm<'db, MatchCheckCtx<'db>>;
+pub(crate) type WitnessPat<'db> = rustc_pattern_analysis::pat::WitnessPat<MatchCheckCtx<'db>>;

 /// [Constructor] uses this in unimplemented variants.
 /// It allows porting match expressions from upstream algorithm without losing semantics.
@@ -54,23 +52,27 @@ impl EnumVariantContiguousIndex {
 }
 }

+impl rustc_pattern_analysis::Idx for EnumVariantContiguousIndex {
+fn new(idx: usize) -> Self {
+EnumVariantContiguousIndex(idx)
+}
+
+fn index(self) -> usize {
+self.0
+}
+}
+
 #[derive(Clone)]
-pub(crate) struct MatchCheckCtx<'p> {
+pub(crate) struct MatchCheckCtx<'db> {
 module: ModuleId,
 body: DefWithBodyId,
-pub(crate) db: &'p dyn HirDatabase,
+pub(crate) db: &'db dyn HirDatabase,
 exhaustive_patterns: bool,
 min_exhaustive_patterns: bool,
 }

-#[derive(Clone)]
-pub(crate) struct PatData<'p> {
-/// Keep db around so that we can print variant names in `Debug`.
-pub(crate) db: &'p dyn HirDatabase,
-}
-
-impl<'p> MatchCheckCtx<'p> {
-pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'p dyn HirDatabase) -> Self {
+impl<'db> MatchCheckCtx<'db> {
+pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self {
 let def_map = db.crate_def_map(module.krate());
 let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns");
 let min_exhaustive_patterns =
@@ -80,9 +82,9 @@ impl<'p> MatchCheckCtx<'p> {

 pub(crate) fn compute_match_usefulness(
 &self,
-arms: &[MatchArm<'p>],
+arms: &[MatchArm<'db>],
 scrut_ty: Ty,
-) -> Result<UsefulnessReport<'p, Self>, ()> {
+) -> Result<UsefulnessReport<'db, Self>, ()> {
 // FIXME: Determine place validity correctly. For now, err on the safe side.
 let place_validity = PlaceValidity::MaybeInvalid;
 // Measured to take ~100ms on modern hardware.
@@ -101,7 +103,7 @@ impl<'p> MatchCheckCtx<'p> {
 }

 fn variant_id_for_adt(
-db: &'p dyn HirDatabase,
+db: &'db dyn HirDatabase,
 ctor: &Constructor<Self>,
 adt: hir_def::AdtId,
 ) -> Option<VariantId> {
@@ -126,7 +128,7 @@ impl<'p> MatchCheckCtx<'p> {
 &'a self,
 ty: &'a Ty,
 variant: VariantId,
-) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'p> {
+) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'db> {
 let (_, substs) = ty.as_adt().unwrap();

 let field_tys = self.db.field_types(variant);
@@ -139,8 +141,8 @@ impl<'p> MatchCheckCtx<'p> {
 })
 }

-pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> {
-let singleton = |pat: DeconstructedPat<'p>| vec![pat.at_index(0)];
+pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'db> {
+let singleton = |pat: DeconstructedPat<'db>| vec![pat.at_index(0)];
 let ctor;
 let mut fields: Vec<_>;
 let arity;
@@ -228,6 +230,11 @@ impl<'p> MatchCheckCtx<'p> {
 fields = Vec::new();
 arity = 0;
 }
+PatKind::Never => {
+ctor = Never;
+fields = Vec::new();
+arity = 0;
+}
 PatKind::Or { pats } => {
 ctor = Or;
 fields = pats
@@ -238,11 +245,10 @@ impl<'p> MatchCheckCtx<'p> {
 arity = pats.len();
 }
 }
-let data = PatData { db: self.db };
-DeconstructedPat::new(ctor, fields, arity, pat.ty.clone(), data)
+DeconstructedPat::new(ctor, fields, arity, pat.ty.clone(), ())
 }

-pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'p>) -> Pat {
+pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'db>) -> Pat {
 let mut subpatterns = pat.iter_fields().map(|p| self.hoist_witness_pat(p));
 let kind = match pat.ctor() {
 &Bool(value) => PatKind::LiteralBool { value },
@@ -290,6 +296,7 @@ impl<'p> MatchCheckCtx<'p> {
 Slice(_) => unimplemented!(),
 &Str(void) => match void {},
 Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild,
+Never => PatKind::Never,
 Missing | F32Range(..) | F64Range(..) | Opaque(..) | Or => {
 never!("can't convert to pattern: {:?}", pat.ctor());
 PatKind::Wild
@@ -299,13 +306,13 @@ impl<'p> MatchCheckCtx<'p> {
 }
 }

-impl<'p> PatCx for MatchCheckCtx<'p> {
+impl<'db> PatCx for MatchCheckCtx<'db> {
 type Error = ();
 type Ty = Ty;
 type VariantIdx = EnumVariantContiguousIndex;
 type StrLit = Void;
 type ArmData = ();
-type PatData = PatData<'p>;
+type PatData = ();

 fn is_exhaustive_patterns_feature_on(&self) -> bool {
 self.exhaustive_patterns
@@ -339,8 +346,8 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
 },
 Ref => 1,
 Slice(..) => unimplemented!(),
-Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
-| NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0,
+Never | Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..)
+| Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0,
 Or => {
 never!("The `Or` constructor doesn't have a fixed arity");
 0
@@ -402,8 +409,10 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
 }
 },
 Slice(_) => unreachable!("Found a `Slice` constructor in match checking"),
-Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
-| NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => smallvec![],
+Never | Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..)
+| Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => {
+smallvec![]
+}
 Or => {
 never!("called `Fields::wildcards` on an `Or` ctor");
 smallvec![]
@@ -442,11 +451,8 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
 if enum_data.variants.is_empty() && !is_declared_nonexhaustive {
 ConstructorSet::NoConstructors
 } else {
-let mut variants = FxHashMap::with_capacity_and_hasher(
-enum_data.variants.len(),
-Default::default(),
-);
-for (i, &(variant, _)) in enum_data.variants.iter().enumerate() {
+let mut variants = IndexVec::with_capacity(enum_data.variants.len());
+for &(variant, _) in enum_data.variants.iter() {
 let is_uninhabited =
 is_enum_variant_uninhabited_from(cx.db, variant, subst, cx.module);
 let visibility = if is_uninhabited {
@@ -454,13 +460,10 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
 } else {
 VariantVisibility::Visible
 };
-variants.insert(EnumVariantContiguousIndex(i), visibility);
+variants.push(visibility);
 }

-ConstructorSet::Variants {
-variants: IdxContainer(variants),
-non_exhaustive: is_declared_nonexhaustive,
-}
+ConstructorSet::Variants { variants, non_exhaustive: is_declared_nonexhaustive }
 }
 }
 TyKind::Adt(AdtId(hir_def::AdtId::UnionId(_)), _) => ConstructorSet::Union,
@@ -476,26 +479,27 @@ impl<'p> PatCx for MatchCheckCtx<'p> {

 fn write_variant_name(
 f: &mut fmt::Formatter<'_>,
-pat: &rustc_pattern_analysis::pat::DeconstructedPat<Self>,
+_ctor: &Constructor<Self>,
+_ty: &Self::Ty,
 ) -> fmt::Result {
-let db = pat.data().db;
-let variant =
-pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(db, pat.ctor(), adt));
+write!(f, "<write_variant_name unsupported>")
+// We lack the database here ...
+// let variant = ty.as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(db, ctor, adt));

-if let Some(variant) = variant {
-match variant {
-VariantId::EnumVariantId(v) => {
-write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?;
-}
-VariantId::StructId(s) => {
-write!(f, "{}", db.struct_data(s).name.display(db.upcast()))?
-}
-VariantId::UnionId(u) => {
-write!(f, "{}", db.union_data(u).name.display(db.upcast()))?
-}
-}
-}
-Ok(())
+// if let Some(variant) = variant {
+// match variant {
+// VariantId::EnumVariantId(v) => {
+// write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?;
+// }
+// VariantId::StructId(s) => {
+// write!(f, "{}", db.struct_data(s).name.display(db.upcast()))?
+// }
+// VariantId::UnionId(u) => {
+// write!(f, "{}", db.union_data(u).name.display(db.upcast()))?
+// }
+// }
+// }
+// Ok(())
 }

 fn bug(&self, fmt: fmt::Arguments<'_>) {
@@ -507,7 +511,7 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
 }
 }

-impl<'p> fmt::Debug for MatchCheckCtx<'p> {
+impl<'db> fmt::Debug for MatchCheckCtx<'db> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 f.debug_struct("MatchCheckCtx").finish()
 }
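The pattern-analysis hunks above swap the `FxHashMap` plus `IdxContainer` pair for `rustc_pattern_analysis::IndexVec`, keyed by `EnumVariantContiguousIndex`, which now implements the crate's `Idx` trait. The sketch below hand-rolls look-alike `Idx` and `IndexVec` items to show the idea; it does not use the real `rustc_pattern_analysis` API.

```rust
// Hand-rolled look-alikes of an `Idx` trait and an index-keyed vector.
trait Idx: Copy {
    fn new(idx: usize) -> Self;
    fn index(self) -> usize;
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct EnumVariantContiguousIndex(usize);

impl Idx for EnumVariantContiguousIndex {
    fn new(idx: usize) -> Self {
        EnumVariantContiguousIndex(idx)
    }
    fn index(self) -> usize {
        self.0
    }
}

// Dense storage: pushing in declaration order assigns contiguous indices,
// so no hashing is needed to map a variant index to its data.
struct IndexVec<I: Idx, T> {
    raw: Vec<T>,
    _marker: std::marker::PhantomData<I>,
}

impl<I: Idx, T> IndexVec<I, T> {
    fn with_capacity(cap: usize) -> Self {
        IndexVec { raw: Vec::with_capacity(cap), _marker: std::marker::PhantomData }
    }
    fn push(&mut self, value: T) -> I {
        self.raw.push(value);
        I::new(self.raw.len() - 1)
    }
    fn get(&self, i: I) -> Option<&T> {
        self.raw.get(i.index())
    }
}

fn main() {
    let mut visibilities: IndexVec<EnumVariantContiguousIndex, bool> = IndexVec::with_capacity(3);
    for visible in [true, false, true] {
        visibilities.push(visible);
    }
    let second = EnumVariantContiguousIndex::new(1);
    assert_eq!(visibilities.get(second), Some(&false));
}
```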
@@ -21,13 +21,17 @@ use hir_def::{
 path::{Path, PathKind},
 type_ref::{TraitBoundModifier, TypeBound, TypeRef},
 visibility::Visibility,
-HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId,
-TraitId,
+GenericDefId, HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId,
+ModuleId, TraitId,
 };
 use hir_expand::name::Name;
 use intern::{Internable, Interned};
 use itertools::Itertools;
 use la_arena::ArenaMap;
+use rustc_apfloat::{
+ieee::{Half as f16, Quad as f128},
+Float,
+};
 use smallvec::SmallVec;
 use stdx::{never, IsNoneOr};
 use triomphe::Arc;
@@ -545,6 +549,17 @@ fn render_const_scalar(
 write!(f, "{it}")
 }
 Scalar::Float(fl) => match fl {
+chalk_ir::FloatTy::F16 => {
+// FIXME(#17451): Replace with builtins once they are stabilised.
+let it = f16::from_bits(u16::from_le_bytes(b.try_into().unwrap()).into());
+let s = it.to_string();
+if s.strip_prefix('-').unwrap_or(&s).chars().all(|c| c.is_ascii_digit()) {
+// Match Rust debug formatting
+write!(f, "{s}.0")
+} else {
+write!(f, "{s}")
+}
+}
 chalk_ir::FloatTy::F32 => {
 let it = f32::from_le_bytes(b.try_into().unwrap());
 write!(f, "{it:?}")
@@ -553,6 +568,17 @@ fn render_const_scalar(
 let it = f64::from_le_bytes(b.try_into().unwrap());
 write!(f, "{it:?}")
 }
+chalk_ir::FloatTy::F128 => {
+// FIXME(#17451): Replace with builtins once they are stabilised.
+let it = f128::from_bits(u128::from_le_bytes(b.try_into().unwrap()));
+let s = it.to_string();
+if s.strip_prefix('-').unwrap_or(&s).chars().all(|c| c.is_ascii_digit()) {
+// Match Rust debug formatting
+write!(f, "{s}.0")
+} else {
+write!(f, "{s}")
+}
+}
 },
 },
 TyKind::Ref(_, _, t) => match t.kind(Interner) {
@@ -988,7 +1014,8 @@ impl HirDisplay for Ty {
 f.end_location_link();

 if parameters.len(Interner) > 0 {
-let generics = generics(db.upcast(), def.into());
+let generic_def_id = GenericDefId::from_callable(db.upcast(), def);
+let generics = generics(db.upcast(), generic_def_id);
 let (parent_len, self_param, type_, const_, impl_, lifetime) =
 generics.provenance_split();
 let parameters = parameters.as_slice(Interner);
@@ -1002,8 +1029,9 @@ impl HirDisplay for Ty {
 debug_assert_eq!(parent_params.len(), parent_len);

 let parent_params =
-generic_args_sans_defaults(f, Some(def.into()), parent_params);
-let fn_params = generic_args_sans_defaults(f, Some(def.into()), fn_params);
+generic_args_sans_defaults(f, Some(generic_def_id), parent_params);
+let fn_params =
+generic_args_sans_defaults(f, Some(generic_def_id), fn_params);

 write!(f, "<")?;
 hir_fmt_generic_arguments(f, parent_params, None)?;
@@ -1041,7 +1069,11 @@ impl HirDisplay for Ty {
 module_id,
 PrefixKind::Plain,
 false,
-ImportPathConfig { prefer_no_std: false, prefer_prelude: true },
+ImportPathConfig {
+prefer_no_std: false,
+prefer_prelude: true,
+prefer_absolute: false,
+},
 ) {
 write!(f, "{}", path.display(f.db.upcast()))?;
 } else {
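The `f16`/`f128` arms added to `render_const_scalar` above format the value through a string and append `.0` when there is no fractional part, to mimic Rust's `{:?}` output for floats. A tiny standalone sketch of just that rule:

```rust
// Append ".0" when the decimal string is a bare (optionally signed) integer,
// matching how Rust's `{:?}` prints floats such as -54.0.
fn render_float(s: &str) -> String {
    if s.strip_prefix('-').unwrap_or(s).chars().all(|c| c.is_ascii_digit()) {
        format!("{s}.0")
    } else {
        s.to_owned()
    }
}

fn main() {
    assert_eq!(render_float("-54"), "-54.0");
    assert_eq!(render_float("10.5"), "10.5");
}
```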
@@ -2,8 +2,8 @@
 //!
 //! The layout for generics as expected by chalk are as follows:
 //! - Optional Self parameter
-//! - Type or Const parameters
 //! - Lifetime parameters
+//! - Type or Const parameters
 //! - Parent parameters
 //!
 //! where parent follows the same scheme.
@@ -20,18 +20,23 @@ use hir_def::{
 LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
 };
 use intern::Interned;
+use itertools::chain;
+use stdx::TupleExt;

 use crate::{db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx, Interner, Substitution};

 pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
 let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
-Generics { def, params: db.generic_params(def), parent_generics }
+let params = db.generic_params(def);
+let has_trait_self_param = params.trait_self_param().is_some();
+Generics { def, params, parent_generics, has_trait_self_param }
 }
 #[derive(Clone, Debug)]
 pub(crate) struct Generics {
 def: GenericDefId,
 params: Interned<GenericParams>,
 parent_generics: Option<Box<Generics>>,
+has_trait_self_param: bool,
 }

 impl<T> ops::Index<T> for Generics
@@ -57,7 +62,7 @@ impl Generics {
 self.iter_self().map(|(id, _)| id)
 }

-fn iter_parent_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
+pub(crate) fn iter_parent_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
 self.iter_parent().map(|(id, _)| id)
 }

@@ -67,6 +72,12 @@ impl Generics {
 self.params.iter_type_or_consts()
 }

+pub(crate) fn iter_self_type_or_consts_id(
+&self,
+) -> impl DoubleEndedIterator<Item = GenericParamId> + '_ {
+self.params.iter_type_or_consts().map(from_toc_id(self)).map(TupleExt::head)
+}
+
 /// Iterate over the params followed by the parent params.
 pub(crate) fn iter(
 &self,
@@ -78,10 +89,9 @@ impl Generics {
 pub(crate) fn iter_self(
 &self,
 ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
-self.params
-.iter_type_or_consts()
-.map(from_toc_id(self))
-.chain(self.params.iter_lt().map(from_lt_id(self)))
+let mut toc = self.params.iter_type_or_consts().map(from_toc_id(self));
+let trait_self_param = self.has_trait_self_param.then(|| toc.next()).flatten();
+chain!(trait_self_param, self.params.iter_lt().map(from_lt_id(self)), toc)
 }

 /// Iterator over types and const params of parent.
@@ -89,8 +99,9 @@ impl Generics {
 &self,
 ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
 self.parent_generics().into_iter().flat_map(|it| {
-let lt_iter = it.params.iter_lt().map(from_lt_id(it));
-it.params.iter_type_or_consts().map(from_toc_id(it)).chain(lt_iter)
+let mut toc = it.params.iter_type_or_consts().map(from_toc_id(it));
+let trait_self_param = it.has_trait_self_param.then(|| toc.next()).flatten();
+chain!(trait_self_param, it.params.iter_lt().map(from_lt_id(it)), toc)
 })
 }

@@ -134,8 +145,11 @@ impl Generics {
 fn find_type_or_const_param(&self, param: TypeOrConstParamId) -> Option<usize> {
 if param.parent == self.def {
 let idx = param.local_id.into_raw().into_u32() as usize;
-debug_assert!(idx <= self.params.type_or_consts.len());
-Some(idx)
+debug_assert!(idx <= self.params.len_type_or_consts());
+if self.params.trait_self_param() == Some(param.local_id) {
+return Some(idx);
+}
+Some(self.params.len_lifetimes() + idx)
 } else {
 debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(param.parent));
 self.parent_generics()
@@ -152,8 +166,8 @@ impl Generics {
 fn find_lifetime(&self, lifetime: LifetimeParamId) -> Option<usize> {
 if lifetime.parent == self.def {
 let idx = lifetime.local_id.into_raw().into_u32() as usize;
-debug_assert!(idx <= self.params.lifetimes.len());
-Some(self.params.type_or_consts.len() + idx)
+debug_assert!(idx <= self.params.len_lifetimes());
+Some(self.params.trait_self_param().is_some() as usize + idx)
 } else {
 debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(lifetime.parent));
 self.parent_generics()
@@ -216,7 +230,6 @@ fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<Generic
 GenericDefId::FunctionId(it) => it.lookup(db).container,
 GenericDefId::TypeAliasId(it) => it.lookup(db).container,
 GenericDefId::ConstId(it) => it.lookup(db).container,
-GenericDefId::EnumVariantId(it) => return Some(it.lookup(db).parent.into()),
 GenericDefId::AdtId(_)
 | GenericDefId::TraitId(_)
 | GenericDefId::ImplId(_)
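The generics hunks above document the parameter order as Self, then lifetimes, then type/const parameters, then parent parameters, and `find_lifetime` / `find_type_or_const_param` now compute positions against that layout. A small sketch of the index arithmetic under that assumption (the counts are illustrative; the real code reads them from `GenericParams`):

```rust
// Illustrative counts for one generic def.
// Layout: [Self?] [lifetimes...] [other type/const params...] [parent params...]
struct GenericCounts {
    has_trait_self_param: bool,
    len_lifetimes: usize,
}

impl GenericCounts {
    // Mirrors `find_lifetime` above: lifetimes sit right after the optional Self.
    fn lifetime_position(&self, local_idx: usize) -> usize {
        self.has_trait_self_param as usize + local_idx
    }

    // Mirrors `find_type_or_const_param` above: the trait Self param keeps
    // slot 0, every other type/const param is shifted past the lifetimes.
    fn type_or_const_position(&self, local_idx: usize, is_trait_self: bool) -> usize {
        if is_trait_self {
            local_idx
        } else {
            self.len_lifetimes + local_idx
        }
    }
}

fn main() {
    // A trait with Self, two lifetimes and one extra type parameter.
    let counts = GenericCounts { has_trait_self_param: true, len_lifetimes: 2 };
    assert_eq!(counts.type_or_const_position(0, true), 0); // Self
    assert_eq!(counts.lifetime_position(0), 1);
    assert_eq!(counts.lifetime_position(1), 2);
    assert_eq!(counts.type_or_const_position(1, false), 3); // first real type param
}
```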
@@ -701,18 +701,23 @@ impl<'a> InferenceContext<'a> {
 table.propagate_diverging_flag();
 for ty in type_of_expr.values_mut() {
 *ty = table.resolve_completely(ty.clone());
+*has_errors = *has_errors || ty.contains_unknown();
 }
 for ty in type_of_pat.values_mut() {
 *ty = table.resolve_completely(ty.clone());
+*has_errors = *has_errors || ty.contains_unknown();
 }
 for ty in type_of_binding.values_mut() {
 *ty = table.resolve_completely(ty.clone());
+*has_errors = *has_errors || ty.contains_unknown();
 }
 for ty in type_of_rpit.values_mut() {
 *ty = table.resolve_completely(ty.clone());
+*has_errors = *has_errors || ty.contains_unknown();
 }
 for ty in type_of_for_iterator.values_mut() {
 *ty = table.resolve_completely(ty.clone());
+*has_errors = *has_errors || ty.contains_unknown();
 }

 *has_errors = !type_mismatches.is_empty();
@@ -835,11 +840,7 @@ impl<'a> InferenceContext<'a> {
 let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) {
 // RPIT opaque types use substitution of their parent function.
 let fn_placeholders = TyBuilder::placeholder_subst(self.db, func);
-let result = self.insert_inference_vars_for_impl_trait(
-return_ty,
-rpits.clone(),
-fn_placeholders,
-);
+let result = self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders);
 let rpits = rpits.skip_binders();
 for (id, _) in rpits.impl_traits.iter() {
 if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) {
@@ -862,12 +863,7 @@ impl<'a> InferenceContext<'a> {
 self.insert_atpit_coercion_table(params_and_ret_tys.iter());
 }

-fn insert_inference_vars_for_impl_trait<T>(
-&mut self,
-t: T,
-rpits: Arc<chalk_ir::Binders<crate::ImplTraits>>,
-placeholders: Substitution,
-) -> T
+fn insert_inference_vars_for_impl_trait<T>(&mut self, t: T, placeholders: Substitution) -> T
 where
 T: crate::HasInterner<Interner = Interner> + crate::TypeFoldable<Interner>,
 {
@@ -878,13 +874,21 @@ impl<'a> InferenceContext<'a> {
 TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id,
 _ => return ty,
 };
-let idx = match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) {
-ImplTraitId::ReturnTypeImplTrait(_, idx) => idx,
-ImplTraitId::AssociatedTypeImplTrait(_, idx) => idx,
-_ => unreachable!(),
+let (impl_traits, idx) =
+match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) {
+ImplTraitId::ReturnTypeImplTrait(def, idx) => {
+(self.db.return_type_impl_traits(def), idx)
+}
+ImplTraitId::AssociatedTypeImplTrait(def, idx) => {
+(self.db.type_alias_impl_traits(def), idx)
+}
+_ => unreachable!(),
+};
+let Some(impl_traits) = impl_traits else {
+return ty;
 };
-let bounds =
-(*rpits).map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.iter()));
+let bounds = (*impl_traits)
+.map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.iter()));
 let var = self.table.new_type_var();
 let var_subst = Substitution::from1(Interner, var.clone());
 for bound in bounds {
@@ -892,11 +896,8 @@ impl<'a> InferenceContext<'a> {
 let (var_predicate, binders) =
 predicate.substitute(Interner, &var_subst).into_value_and_skipped_binders();
 always!(binders.is_empty(Interner)); // quantified where clauses not yet handled
-let var_predicate = self.insert_inference_vars_for_impl_trait(
-var_predicate,
-rpits.clone(),
-placeholders.clone(),
-);
+let var_predicate = self
+.insert_inference_vars_for_impl_trait(var_predicate, placeholders.clone());
 self.push_obligation(var_predicate.cast(Interner));
 }
 self.result.type_of_rpit.insert(idx, var.clone());
@@ -983,16 +984,8 @@ impl<'a> InferenceContext<'a> {
 self.db.lookup_intern_impl_trait_id(opaque_ty_id.into())
 {
 if assoc_tys.contains(&alias_id) {
-let atpits = self
-.db
-.type_alias_impl_traits(alias_id)
-.expect("Marked as ATPIT but no impl traits!");
 let alias_placeholders = TyBuilder::placeholder_subst(self.db, alias_id);
-let ty = self.insert_inference_vars_for_impl_trait(
-ty,
-atpits,
-alias_placeholders,
-);
+let ty = self.insert_inference_vars_for_impl_trait(ty, alias_placeholders);
 return Some((opaque_ty_id, ty));
 }
 }
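The inference hunks above OR-fold `ty.contains_unknown()` into the `has_errors` flag while resolving each table, instead of deriving it from type mismatches alone. A minimal sketch of that accumulation, with `Ty` as a stand-in type rather than the real chalk type:

```rust
// Stand-in type: "unknown" marks a type that inference gave up on.
#[derive(Clone)]
enum Ty {
    Known(&'static str),
    Unknown,
}

impl Ty {
    fn contains_unknown(&self) -> bool {
        matches!(self, Ty::Unknown)
    }
}

fn main() {
    let mut type_of_expr = vec![Ty::Known("i32"), Ty::Unknown, Ty::Known("bool")];
    let mut has_errors = false;
    for ty in type_of_expr.iter_mut() {
        // resolve_completely(...) would go here; the flag is OR-folded per entry.
        has_errors = has_errors || ty.contains_unknown();
    }
    assert!(has_errors);
}
```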
@@ -13,7 +13,7 @@ use hir_def::{
 },
 lang_item::{LangItem, LangItemTarget},
 path::{GenericArgs, Path},
-BlockId, FieldId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId,
+BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId,
 };
 use hir_expand::name::{name, Name};
 use stdx::always;
@@ -440,7 +440,8 @@ impl InferenceContext<'_> {
 let ty = match self.infer_path(p, tgt_expr.into()) {
 Some(ty) => ty,
 None => {
-if matches!(p, Path::Normal { mod_path, .. } if mod_path.is_ident()) {
+if matches!(p, Path::Normal { mod_path, .. } if mod_path.is_ident() || mod_path.is_self())
+{
 self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent {
 expr: tgt_expr,
 });
@@ -1895,7 +1896,8 @@ impl InferenceContext<'_> {
 let callable_ty = self.resolve_ty_shallow(callable_ty);
 if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) {
 let def: CallableDefId = from_chalk(self.db, *fn_def);
-let generic_predicates = self.db.generic_predicates(def.into());
+let generic_predicates =
+self.db.generic_predicates(GenericDefId::from_callable(self.db.upcast(), def));
 for predicate in generic_predicates.iter() {
 let (predicate, binders) = predicate
 .clone()
@@ -41,14 +41,7 @@ impl InferenceContext<'_> {
 fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> {
 let (value, self_subst) = self.resolve_value_path_inner(path, id)?;

-let value_def = match value {
-ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) {
-Some(ty) => return Some(ValuePathResolution::NonGeneric(ty.clone())),
-None => {
-never!("uninferred pattern?");
-return None;
-}
-},
+let value_def: ValueTyDefId = match value {
 ValueNs::FunctionId(it) => it.into(),
 ValueNs::ConstId(it) => it.into(),
 ValueNs::StaticId(it) => it.into(),
@@ -62,48 +55,79 @@ impl InferenceContext<'_> {

 it.into()
 }
+ValueNs::LocalBinding(pat) => {
+return match self.result.type_of_binding.get(pat) {
+Some(ty) => Some(ValuePathResolution::NonGeneric(ty.clone())),
+None => {
+never!("uninferred pattern?");
+None
+}
+}
+}
 ValueNs::ImplSelf(impl_id) => {
 let generics = crate::generics::generics(self.db.upcast(), impl_id.into());
 let substs = generics.placeholder_subst(self.db);
 let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
-if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
-return Some(ValuePathResolution::GenericDef(
+return if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
+Some(ValuePathResolution::GenericDef(
 struct_id.into(),
 struct_id.into(),
 substs.clone(),
-));
+))
 } else {
 // FIXME: report error, invalid Self reference
-return None;
-}
+None
+};
 }
 ValueNs::GenericParam(it) => {
 return Some(ValuePathResolution::NonGeneric(self.db.const_param_ty(it)))
 }
 };

+let generic_def_id = value_def.to_generic_def_id(self.db);
+let Some(generic_def) = generic_def_id else {
+// `value_def` is the kind of item that can never be generic (i.e. statics, at least
+// currently). We can just skip the binders to get its type.
+let (ty, binders) = self.db.value_ty(value_def)?.into_value_and_skipped_binders();
+stdx::always!(binders.is_empty(Interner), "non-empty binders for non-generic def",);
+return Some(ValuePathResolution::NonGeneric(ty));
+};
+
 let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
 let substs = ctx.substs_from_path(path, value_def, true);
 let substs = substs.as_slice(Interner);

+if let ValueNs::EnumVariantId(_) = value {
+let mut it = self_subst
+.as_ref()
+.map_or(&[][..], |s| s.as_slice(Interner))
+.iter()
+.chain(substs)
+.cloned();
+let builder = TyBuilder::subst_for_def(self.db, generic_def, None);
+let substs = builder
+.fill(|x| {
+it.next().unwrap_or_else(|| match x {
+ParamKind::Type => {
+self.result.standard_types.unknown.clone().cast(Interner)
+}
+ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()),
+ParamKind::Lifetime => error_lifetime().cast(Interner),
+})
+})
+.build();
+
+return Some(ValuePathResolution::GenericDef(value_def, generic_def, substs));
+}
+
 let parent_substs = self_subst.or_else(|| {
-let generics = generics(self.db.upcast(), value_def.to_generic_def_id()?);
+let generics = generics(self.db.upcast(), generic_def_id?);
 let parent_params_len = generics.parent_generics()?.len();
 let parent_args = &substs[substs.len() - parent_params_len..];
 Some(Substitution::from_iter(Interner, parent_args))
 });
 let parent_substs_len = parent_substs.as_ref().map_or(0, |s| s.len(Interner));
 let mut it = substs.iter().take(substs.len() - parent_substs_len).cloned();

-let Some(generic_def) = value_def.to_generic_def_id() else {
-// `value_def` is the kind of item that can never be generic (i.e. statics, at least
-// currently). We can just skip the binders to get its type.
-let (ty, binders) = self.db.value_ty(value_def)?.into_value_and_skipped_binders();
-stdx::always!(
-parent_substs.is_none() && binders.is_empty(Interner),
-"non-empty binders for non-generic def",
-);
-return Some(ValuePathResolution::NonGeneric(ty));
-};
 let builder = TyBuilder::subst_for_def(self.db, generic_def, parent_substs);
 let substs = builder
 .fill(|x| {
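In the enum-variant branch added above, explicit generic arguments are consumed first and any missing argument is filled with a per-kind fallback (unknown type, unknown const, error lifetime). The sketch below shows that fill pattern with invented argument kinds; it is not the real `TyBuilder` API.

```rust
// Invented stand-ins for generic argument kinds and their fallbacks.
#[derive(Clone, Debug, PartialEq)]
enum GenericArg {
    Type(&'static str),
    Const(&'static str),
    Lifetime(&'static str),
}

#[derive(Clone, Copy)]
enum ParamKind {
    Type,
    Const,
    Lifetime,
}

// Consume provided args first; fall back to an "unknown"/"error" value per kind.
fn fill(params: &[ParamKind], provided: impl IntoIterator<Item = GenericArg>) -> Vec<GenericArg> {
    let mut provided = provided.into_iter();
    params
        .iter()
        .map(|kind| {
            provided.next().unwrap_or_else(|| match kind {
                ParamKind::Type => GenericArg::Type("{unknown}"),
                ParamKind::Const => GenericArg::Const("{unknown}"),
                ParamKind::Lifetime => GenericArg::Lifetime("'error"),
            })
        })
        .collect()
}

fn main() {
    let params = [ParamKind::Type, ParamKind::Lifetime, ParamKind::Type];
    let substs = fill(&params, [GenericArg::Type("i32")]);
    assert_eq!(
        substs,
        vec![
            GenericArg::Type("i32"),
            GenericArg::Lifetime("'error"),
            GenericArg::Type("{unknown}")
        ]
    );
}
```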
@@ -265,8 +265,10 @@ pub fn layout_of_ty_query(
 chalk_ir::Scalar::Float(f) => scalar(
 dl,
 Primitive::Float(match f {
+FloatTy::F16 => Float::F16,
 FloatTy::F32 => Float::F32,
 FloatTy::F64 => Float::F64,
+FloatTy::F128 => Float::F128,
 }),
 ),
 },
@@ -426,6 +426,7 @@ fn enums() {

 #[test]
 fn primitives() {
+// FIXME(#17451): Add `f16` and `f128` once they are stabilised.
 size_and_align! {
 struct Goal(i32, i128, isize, usize, f32, f64, bool, char);
 }
@@ -1,6 +1,6 @@
 //! The type system. We currently use this to infer types for completion, hover
 //! information and various assists.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 
 #[cfg(feature = "in-rust-tree")]
@@ -60,7 +60,7 @@ use chalk_ir::{
 NoSolution,
 };
 use either::Either;
-use hir_def::{hir::ExprId, type_ref::Rawness, GeneralConstId, TypeOrConstParamId};
+use hir_def::{hir::ExprId, type_ref::Rawness, CallableDefId, GeneralConstId, TypeOrConstParamId};
 use hir_expand::name;
 use la_arena::{Arena, Idx};
 use mir::{MirEvalError, VTableMap};
@@ -84,8 +84,8 @@ pub use infer::{
 };
 pub use interner::Interner;
 pub use lower::{
-associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, ParamLoweringMode,
+associated_type_shorthand_candidates, ImplTraitLoweringMode, ParamLoweringMode, TyDefId,
-TyDefId, TyLoweringContext, ValueTyDefId,
+TyLoweringContext, ValueTyDefId,
 };
 pub use mapping::{
 from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
@@ -11,10 +11,7 @@ use std::{
 ops::{self, Not as _},
 };
 
-use base_db::{
+use base_db::{salsa::Cycle, CrateId};
-salsa::{Cycle, InternValueTrivial},
-CrateId,
-};
 use chalk_ir::{
 cast::Cast,
 fold::{Shift, TypeFoldable},
@@ -38,10 +35,10 @@ use hir_def::{
 type_ref::{
 ConstRef, LifetimeRef, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef,
 },
-AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
+AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId,
-GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, ItemContainerId, LocalFieldId,
+FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, ItemContainerId,
-Lookup, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId,
+LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId,
-UnionId, VariantId,
+TypeOwnerId, UnionId, VariantId,
 };
 use hir_expand::{name::Name, ExpandResult};
 use intern::Interned;
@@ -387,14 +384,18 @@ impl<'a> TyLoweringContext<'a> {
 type_params,
 const_params,
 _impl_trait_params,
-_lifetime_params,
+lifetime_params,
 ) = self
 .generics()
 .expect("variable impl trait lowering must be in a generic def")
 .provenance_split();
 TyKind::BoundVar(BoundVar::new(
 self.in_binders,
-idx as usize + self_param as usize + type_params + const_params,
+idx as usize
+ + self_param as usize
+ + type_params
+ + const_params
+ + lifetime_params,
 ))
 .intern(Interner)
 }
@@ -815,13 +816,13 @@ impl<'a> TyLoweringContext<'a> {
 infer_args: bool,
 explicit_self_ty: Option<Ty>,
 ) -> Substitution {
-// Remember that the item's own generic args come before its parent's.
+let Some(def) = def else { return Substitution::empty(Interner) };
-let mut substs = Vec::new();
+
-let def = if let Some(d) = def {
+// Order is
-d
+// - Optional Self parameter
-} else {
+// - Lifetime parameters
-return Substitution::empty(Interner);
+// - Type or Const parameters
-};
+// - Parent parameters
 let def_generics = generics(self.db.upcast(), def);
 let (
 parent_params,
@@ -835,130 +836,121 @@ impl<'a> TyLoweringContext<'a> {
 self_param as usize + type_params + const_params + impl_trait_params + lifetime_params;
 let total_len = parent_params + item_len;
 
-let ty_error = TyKind::Error.intern(Interner).cast(Interner);
+let mut substs = Vec::new();
 
-let mut def_generic_iter = def_generics.iter_id();
+// we need to iterate the lifetime and type/const params separately as our order of them
+// differs from the supplied syntax
 
-let fill_self_params = || {
+let ty_error = || TyKind::Error.intern(Interner).cast(Interner);
+let mut def_toc_iter = def_generics.iter_self_type_or_consts_id();
+let fill_self_param = || {
 if self_param {
-let self_ty =
+let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error);
-explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(|| ty_error.clone());
 
-if let Some(id) = def_generic_iter.next() {
+if let Some(id) = def_toc_iter.next() {
-assert!(matches!(
+assert!(matches!(id, GenericParamId::TypeParamId(_)));
-id,
-GenericParamId::TypeParamId(_) | GenericParamId::LifetimeParamId(_)
-));
 substs.push(self_ty);
 }
 }
 };
 let mut had_explicit_args = false;
 
-if let Some(generic_args) = &args_and_bindings {
+if let Some(&GenericArgs { ref args, has_self_type, .. }) = args_and_bindings {
-if !generic_args.has_self_type {
+// Fill in the self param first
-fill_self_params();
+if has_self_type && self_param {
-}
+had_explicit_args = true;
-let expected_num = if generic_args.has_self_type {
+if let Some(id) = def_toc_iter.next() {
-self_param as usize + type_params + const_params
+assert!(matches!(id, GenericParamId::TypeParamId(_)));
+had_explicit_args = true;
+if let GenericArg::Type(ty) = &args[0] {
+substs.push(self.lower_ty(ty).cast(Interner));
+}
+}
 } else {
-type_params + const_params
+fill_self_param()
 };
-let skip = if generic_args.has_self_type && !self_param { 1 } else { 0 };
-// if args are provided, it should be all of them, but we can't rely on that
+// Then fill in the supplied lifetime args, or error lifetimes if there are too few
-for arg in generic_args
+// (default lifetimes aren't a thing)
-.args
+for arg in args
+.iter()
+.filter_map(|arg| match arg {
+GenericArg::Lifetime(arg) => Some(self.lower_lifetime(arg)),
+_ => None,
+})
+.chain(iter::repeat(error_lifetime()))
+.take(lifetime_params)
+{
+substs.push(arg.cast(Interner));
+}
 
+let skip = if has_self_type { 1 } else { 0 };
+// Fill in supplied type and const args
+// Note if non-lifetime args are provided, it should be all of them, but we can't rely on that
+for (arg, id) in args
 .iter()
 .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
 .skip(skip)
-.take(expected_num)
+.take(type_params + const_params)
+.zip(def_toc_iter)
 {
-if let Some(id) = def_generic_iter.next() {
+had_explicit_args = true;
 let arg = generic_arg_to_chalk(
 self.db,
 id,
 arg,
 &mut (),
 |_, type_ref| self.lower_ty(type_ref),
 |_, const_ref, ty| self.lower_const(const_ref, ty),
 |_, lifetime_ref| self.lower_lifetime(lifetime_ref),
 );
-had_explicit_args = true;
+substs.push(arg);
-substs.push(arg);
-}
-}
 
-for arg in generic_args
-.args
-.iter()
-.filter(|arg| matches!(arg, GenericArg::Lifetime(_)))
-.take(lifetime_params)
-{
-// Taking into the fact that def_generic_iter will always have lifetimes at the end
-// Should have some test cases tho to test this behaviour more properly
-if let Some(id) = def_generic_iter.next() {
-let arg = generic_arg_to_chalk(
-self.db,
-id,
-arg,
-&mut (),
-|_, type_ref| self.lower_ty(type_ref),
-|_, const_ref, ty| self.lower_const(const_ref, ty),
-|_, lifetime_ref| self.lower_lifetime(lifetime_ref),
-);
-had_explicit_args = true;
-substs.push(arg);
-}
 }
 } else {
-fill_self_params();
+fill_self_param();
 }
 
-// These params include those of parent.
+let param_to_err = |id| match id {
-let remaining_params: SmallVec<[_; 2]> = def_generic_iter
+GenericParamId::ConstParamId(x) => unknown_const_as_generic(self.db.const_param_ty(x)),
-.map(|id| match id {
+GenericParamId::TypeParamId(_) => ty_error(),
-GenericParamId::ConstParamId(x) => {
+GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
-unknown_const_as_generic(self.db.const_param_ty(x))
+};
-}
-GenericParamId::TypeParamId(_) => ty_error.clone(),
-GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
-})
-.collect();
-assert_eq!(remaining_params.len() + substs.len(), total_len);
 
 // handle defaults. In expression or pattern path segments without
 // explicitly specified type arguments, missing type arguments are inferred
 // (i.e. defaults aren't used).
 // Generic parameters for associated types are not supposed to have defaults, so we just
 // ignore them.
-let is_assoc_ty = if let GenericDefId::TypeAliasId(id) = def {
+let is_assoc_ty = || match def {
-let container = id.lookup(self.db.upcast()).container;
+GenericDefId::TypeAliasId(id) => {
-matches!(container, ItemContainerId::TraitId(_))
+matches!(id.lookup(self.db.upcast()).container, ItemContainerId::TraitId(_))
-} else {
+}
-false
+_ => false,
 };
-if !is_assoc_ty && (!infer_args || had_explicit_args) {
+let fill_defaults = (!infer_args || had_explicit_args) && !is_assoc_ty();
-let defaults = self.db.generic_defaults(def);
+if fill_defaults {
-assert_eq!(total_len, defaults.len());
+let defaults = &*self.db.generic_defaults(def);
+let (item, _parent) = defaults.split_at(item_len);
 let parent_from = item_len - substs.len();
 
-for (idx, default_ty) in defaults[substs.len()..item_len].iter().enumerate() {
+let mut rem =
+def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::<Vec<_>>();
+// Fill in defaults for type/const params
+for (idx, default_ty) in item[substs.len()..].iter().enumerate() {
 // each default can depend on the previous parameters
 let substs_so_far = Substitution::from_iter(
 Interner,
-substs.iter().cloned().chain(remaining_params[idx..].iter().cloned()),
+substs.iter().cloned().chain(rem[idx..].iter().cloned()),
 );
 substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
 }
+// Fill in remaining parent params
-// Keep parent's params as unknown.
+substs.extend(rem.drain(parent_from..));
-let mut remaining_params = remaining_params;
-substs.extend(remaining_params.drain(parent_from..));
 } else {
-substs.extend(remaining_params);
+// Fill in remaining def params and parent params
+substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err));
 }
 
-assert_eq!(substs.len(), total_len);
+assert_eq!(substs.len(), total_len, "expected {} substs, got {}", total_len, substs.len());
 Substitution::from_iter(Interner, substs)
 }
 
@@ -1535,7 +1527,7 @@ pub(crate) fn generic_predicates_for_param_query(
 def: GenericDefId,
 param_id: TypeOrConstParamId,
 assoc_name: Option<Name>,
-) -> Arc<[Binders<QuantifiedWhereClause>]> {
+) -> GenericPredicates {
 let resolver = def.resolver(db.upcast());
 let ctx = if let GenericDefId::FunctionId(_) = def {
 TyLoweringContext::new(db, &resolver, def.into())
@@ -1611,7 +1603,7 @@ pub(crate) fn generic_predicates_for_param_query(
 );
 };
 }
-predicates.into()
+GenericPredicates(predicates.is_empty().not().then(|| predicates.into()))
 }
 
 pub(crate) fn generic_predicates_for_param_recover(
@@ -1620,15 +1612,15 @@ pub(crate) fn generic_predicates_for_param_recover(
 _def: &GenericDefId,
 _param_id: &TypeOrConstParamId,
 _assoc_name: &Option<Name>,
-) -> Arc<[Binders<QuantifiedWhereClause>]> {
+) -> GenericPredicates {
-Arc::from_iter(None)
+GenericPredicates(None)
 }
 
 pub(crate) fn trait_environment_for_body_query(
 db: &dyn HirDatabase,
 def: DefWithBodyId,
 ) -> Arc<TraitEnvironment> {
-let Some(def) = def.as_generic_def_id() else {
+let Some(def) = def.as_generic_def_id(db.upcast()) else {
 let krate = def.module(db.upcast()).krate();
 return TraitEnvironment::empty(krate);
 };
@@ -1725,8 +1717,8 @@ pub(crate) fn generic_predicates_query(
 })
 .collect::<Vec<_>>();
 
-let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+if generics.len() > 0 {
-if !subst.is_empty(Interner) {
+let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
 let explicitly_unsized_tys = ctx.unsized_types.into_inner();
 if let Some(implicitly_sized_predicates) =
 implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
@@ -1995,47 +1987,6 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
 }
 }
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub enum CallableDefId {
-FunctionId(FunctionId),
-StructId(StructId),
-EnumVariantId(EnumVariantId),
-}
-
-impl InternValueTrivial for CallableDefId {}
-
-impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
-impl From<CallableDefId> for ModuleDefId {
-fn from(def: CallableDefId) -> ModuleDefId {
-match def {
-CallableDefId::FunctionId(f) => ModuleDefId::FunctionId(f),
-CallableDefId::StructId(s) => ModuleDefId::AdtId(AdtId::StructId(s)),
-CallableDefId::EnumVariantId(e) => ModuleDefId::EnumVariantId(e),
-}
-}
-}
-
-impl CallableDefId {
-pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
-let db = db.upcast();
-match self {
-CallableDefId::FunctionId(f) => f.krate(db),
-CallableDefId::StructId(s) => s.krate(db),
-CallableDefId::EnumVariantId(e) => e.krate(db),
-}
-}
-}
-
-impl From<CallableDefId> for GenericDefId {
-fn from(def: CallableDefId) -> GenericDefId {
-match def {
-CallableDefId::FunctionId(f) => f.into(),
-CallableDefId::StructId(s) => s.into(),
-CallableDefId::EnumVariantId(e) => e.into(),
-}
-}
-}
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum TyDefId {
 BuiltinType(BuiltinType),
@@ -2056,12 +2007,12 @@ pub enum ValueTyDefId {
 impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
 
 impl ValueTyDefId {
-pub(crate) fn to_generic_def_id(self) -> Option<GenericDefId> {
+pub(crate) fn to_generic_def_id(self, db: &dyn HirDatabase) -> Option<GenericDefId> {
 match self {
 Self::FunctionId(id) => Some(id.into()),
 Self::StructId(id) => Some(id.into()),
 Self::UnionId(id) => Some(id.into()),
-Self::EnumVariantId(var) => Some(var.into()),
+Self::EnumVariantId(var) => Some(var.lookup(db.upcast()).parent.into()),
 Self::ConstId(id) => Some(id.into()),
 Self::StaticId(_) => None,
 }
@@ -2112,7 +2063,7 @@ pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binde
 // returns None if def is a type arg
 pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty {
 let parent_data = db.generic_params(def.parent());
-let data = &parent_data.type_or_consts[def.local_id()];
+let data = &parent_data[def.local_id()];
 let resolver = def.parent().resolver(db.upcast());
 let ctx = TyLoweringContext::new(db, &resolver, def.parent().into());
 match data {
@@ -127,9 +127,11 @@ pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [
 TyFingerprint::Scalar(Scalar::Uint(UintTy::Usize)),
 ];
 
-pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [
+pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 4] = [
+TyFingerprint::Scalar(Scalar::Float(FloatTy::F16)),
 TyFingerprint::Scalar(Scalar::Float(FloatTy::F32)),
 TyFingerprint::Scalar(Scalar::Float(FloatTy::F64)),
+TyFingerprint::Scalar(Scalar::Float(FloatTy::F128)),
 ];
 
 type TraitFpMap = FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Box<[ImplId]>>>;
|
@ -1322,7 +1324,7 @@ fn iterate_inherent_methods(
|
||||||
callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
|
callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
|
||||||
) -> ControlFlow<()> {
|
) -> ControlFlow<()> {
|
||||||
for &impl_id in impls.for_self_ty(self_ty) {
|
for &impl_id in impls.for_self_ty(self_ty) {
|
||||||
for &item in &table.db.impl_data(impl_id).items {
|
for &item in table.db.impl_data(impl_id).items.iter() {
|
||||||
let visible = match is_valid_impl_method_candidate(
|
let visible = match is_valid_impl_method_candidate(
|
||||||
table,
|
table,
|
||||||
self_ty,
|
self_ty,
|
||||||
|
|
|
@@ -18,6 +18,10 @@ use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile};
 use intern::Interned;
 use la_arena::ArenaMap;
 use rustc_abi::TargetDataLayout;
+use rustc_apfloat::{
+ieee::{Half as f16, Quad as f128},
+Float,
+};
 use rustc_hash::{FxHashMap, FxHashSet};
 use stdx::never;
 use syntax::{SyntaxNodePtr, TextRange};
@@ -55,6 +59,13 @@ macro_rules! from_bytes {
 Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $ty).into())),
 }))
 };
+($apfloat:tt, $bits:tt, $value:expr) => {
+// FIXME(#17451): Switch to builtin `f16` and `f128` once they are stable.
+$apfloat::from_bits($bits::from_le_bytes(match ($value).try_into() {
+Ok(it) => it,
+Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $apfloat).into())),
+}).into())
+};
 }
 
 macro_rules! not_supported {
@@ -1110,6 +1121,10 @@ impl Evaluator<'_> {
 }
 if let TyKind::Scalar(chalk_ir::Scalar::Float(f)) = ty.kind(Interner) {
 match f {
+chalk_ir::FloatTy::F16 => {
+let c = -from_bytes!(f16, u16, c);
+Owned(u16::try_from(c.to_bits()).unwrap().to_le_bytes().into())
+}
 chalk_ir::FloatTy::F32 => {
 let c = -from_bytes!(f32, c);
 Owned(c.to_le_bytes().into())
@@ -1118,6 +1133,10 @@ impl Evaluator<'_> {
 let c = -from_bytes!(f64, c);
 Owned(c.to_le_bytes().into())
 }
+chalk_ir::FloatTy::F128 => {
+let c = -from_bytes!(f128, u128, c);
+Owned(c.to_bits().to_le_bytes().into())
+}
 }
 } else {
 let mut c = c.to_vec();
@@ -1169,6 +1188,39 @@ impl Evaluator<'_> {
 }
 if let TyKind::Scalar(chalk_ir::Scalar::Float(f)) = ty.kind(Interner) {
 match f {
+chalk_ir::FloatTy::F16 => {
+let l = from_bytes!(f16, u16, lc);
+let r = from_bytes!(f16, u16, rc);
+match op {
+BinOp::Ge
+| BinOp::Gt
+| BinOp::Le
+| BinOp::Lt
+| BinOp::Eq
+| BinOp::Ne => {
+let r = op.run_compare(l, r) as u8;
+Owned(vec![r])
+}
+BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
+let r = match op {
+BinOp::Add => l + r,
+BinOp::Sub => l - r,
+BinOp::Mul => l * r,
+BinOp::Div => l / r,
+_ => unreachable!(),
+};
+Owned(
+u16::try_from(r.value.to_bits())
+.unwrap()
+.to_le_bytes()
+.into(),
+)
+}
+it => not_supported!(
+"invalid binop {it:?} on floating point operators"
+),
+}
+}
 chalk_ir::FloatTy::F32 => {
 let l = from_bytes!(f32, lc);
 let r = from_bytes!(f32, rc);
@@ -1225,6 +1277,34 @@ impl Evaluator<'_> {
 ),
 }
 }
+chalk_ir::FloatTy::F128 => {
+let l = from_bytes!(f128, u128, lc);
+let r = from_bytes!(f128, u128, rc);
+match op {
+BinOp::Ge
+| BinOp::Gt
+| BinOp::Le
+| BinOp::Lt
+| BinOp::Eq
+| BinOp::Ne => {
+let r = op.run_compare(l, r) as u8;
+Owned(vec![r])
+}
+BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
+let r = match op {
+BinOp::Add => l + r,
+BinOp::Sub => l - r,
+BinOp::Mul => l * r,
+BinOp::Div => l / r,
+_ => unreachable!(),
+};
+Owned(r.value.to_bits().to_le_bytes().into())
+}
+it => not_supported!(
+"invalid binop {it:?} on floating point operators"
+),
+}
+}
 }
 } else {
 let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
@@ -627,6 +627,7 @@ impl Evaluator<'_> {
 if let Some(name) = name.strip_prefix("atomic_") {
 return self.exec_atomic_intrinsic(name, args, generic_args, destination, locals, span);
 }
+// FIXME(#17451): Add `f16` and `f128` intrinsics.
 if let Some(name) = name.strip_suffix("f64") {
 let result = match name {
 "sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
@@ -19,6 +19,7 @@ use hir_def::{
 };
 use hir_expand::name::Name;
 use la_arena::ArenaMap;
+use rustc_apfloat::Float;
 use rustc_hash::FxHashMap;
 use syntax::TextRange;
 use triomphe::Arc;
@@ -183,7 +184,7 @@ impl MirLowerError {
 },
 MirLowerError::GenericArgNotProvided(id, subst) => {
 let parent = id.parent;
-let param = &db.generic_params(parent).type_or_consts[id.local_id];
+let param = &db.generic_params(parent)[id.local_id];
 writeln!(
 f,
 "Generic arg not provided for {}",
@@ -483,7 +484,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
 Ok(Some(current))
 }
 ValueNs::GenericParam(p) => {
-let Some(def) = self.owner.as_generic_def_id() else {
+let Some(def) = self.owner.as_generic_def_id(self.db.upcast()) else {
 not_supported!("owner without generic def id");
 };
 let gen = generics(self.db.upcast(), def);
@@ -1330,7 +1331,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
 }
 
 fn placeholder_subst(&mut self) -> Substitution {
-match self.owner.as_generic_def_id() {
+match self.owner.as_generic_def_id(self.db.upcast()) {
 Some(it) => TyBuilder::placeholder_subst(self.db, it),
 None => Substitution::empty(Interner),
 }
@@ -1432,10 +1433,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
 hir_def::hir::Literal::Int(it, _) => Box::from(&it.to_le_bytes()[0..size()?]),
 hir_def::hir::Literal::Uint(it, _) => Box::from(&it.to_le_bytes()[0..size()?]),
 hir_def::hir::Literal::Float(f, _) => match size()? {
-8 => Box::new(f.into_f64().to_le_bytes()),
+16 => Box::new(f.to_f128().to_bits().to_le_bytes()),
-4 => Box::new(f.into_f32().to_le_bytes()),
+8 => Box::new(f.to_f64().to_le_bytes()),
+4 => Box::new(f.to_f32().to_le_bytes()),
+2 => Box::new(u16::try_from(f.to_f16().to_bits()).unwrap().to_le_bytes()),
 _ => {
-return Err(MirLowerError::TypeError("float with size other than 4 or 8 bytes"))
+return Err(MirLowerError::TypeError(
+"float with size other than 2, 4, 8 or 16 bytes",
+))
 }
 },
 };
@@ -2160,9 +2165,7 @@ pub fn lower_to_mir(
 root_expr: ExprId,
 ) -> Result<MirBody> {
 if infer.has_errors {
-return Err(MirLowerError::TypeMismatch(
+return Err(MirLowerError::TypeMismatch(None));
-infer.type_mismatches().next().map(|(_, it)| it.clone()),
-));
 }
 let mut ctx = MirLowerCtx::new(db, owner, body, infer);
 // 0 is return local
@@ -302,7 +302,7 @@ pub fn monomorphized_mir_body_query(
 subst: Substitution,
 trait_env: Arc<crate::TraitEnvironment>,
 ) -> Result<Arc<MirBody>, MirLowerError> {
-let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def));
+let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def));
 let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
 let body = db.mir_body(owner)?;
 let mut body = (*body).clone();
@@ -327,7 +327,7 @@ pub fn monomorphized_mir_body_for_closure_query(
 trait_env: Arc<crate::TraitEnvironment>,
 ) -> Result<Arc<MirBody>, MirLowerError> {
 let InternedClosure(owner, _) = db.lookup_intern_closure(closure.into());
-let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def));
+let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def));
 let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
 let body = db.mir_body_for_closure(closure)?;
 let mut body = (*body).clone();
@@ -343,7 +343,7 @@ pub fn monomorphize_mir_body_bad(
 trait_env: Arc<crate::TraitEnvironment>,
 ) -> Result<MirBody, MirLowerError> {
 let owner = body.owner;
-let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def));
+let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def));
 let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
 filler.fill_body(&mut body)?;
 Ok(body)
@@ -27,8 +27,10 @@ pub fn uint_ty_to_string(ty: UintTy) -> &'static str {
 
 pub fn float_ty_to_string(ty: FloatTy) -> &'static str {
 match ty {
+FloatTy::F16 => "f16",
 FloatTy::F32 => "f32",
 FloatTy::F64 => "f64",
+FloatTy::F128 => "f128",
 }
 }
 
@@ -56,7 +58,9 @@ pub(super) fn uint_ty_from_builtin(t: BuiltinUint) -> UintTy {
 
 pub(super) fn float_ty_from_builtin(t: BuiltinFloat) -> FloatTy {
 match t {
+BuiltinFloat::F16 => FloatTy::F16,
 BuiltinFloat::F32 => FloatTy::F32,
 BuiltinFloat::F64 => FloatTy::F64,
+BuiltinFloat::F128 => FloatTy::F128,
 }
 }
@@ -111,8 +111,10 @@ fn infer_literal_pattern() {
 if let "foo" = any() {}
 if let 1 = any() {}
 if let 1u32 = any() {}
+if let 1f16 = any() {}
 if let 1f32 = any() {}
 if let 1.0 = any() {}
+if let 1f128 = any() {}
 if let true = any() {}
 }
 "#,
@@ -121,7 +123,7 @@ fn infer_literal_pattern() {
 19..26 'loop {}': !
 24..26 '{}': ()
 37..38 'x': &'? i32
-46..208 '{ ...) {} }': ()
+46..263 '{ ...) {} }': ()
 52..75 'if let...y() {}': ()
 55..72 'let "f... any()': bool
 59..64 '"foo"': &'static str
@@ -145,25 +147,39 @@ fn infer_literal_pattern() {
 124..126 '{}': ()
 131..153 'if let...y() {}': ()
 134..150 'let 1f... any()': bool
-138..142 '1f32': f32
+138..142 '1f16': f16
-138..142 '1f32': f32
+138..142 '1f16': f16
-145..148 'any': fn any<f32>() -> f32
+145..148 'any': fn any<f16>() -> f16
-145..150 'any()': f32
+145..150 'any()': f16
 151..153 '{}': ()
-158..179 'if let...y() {}': ()
+158..180 'if let...y() {}': ()
-161..176 'let 1.0 = any()': bool
+161..177 'let 1f... any()': bool
-165..168 '1.0': f64
+165..169 '1f32': f32
-165..168 '1.0': f64
+165..169 '1f32': f32
-171..174 'any': fn any<f64>() -> f64
+172..175 'any': fn any<f32>() -> f32
-171..176 'any()': f64
+172..177 'any()': f32
-177..179 '{}': ()
+178..180 '{}': ()
-184..206 'if let...y() {}': ()
+185..206 'if let...y() {}': ()
-187..203 'let tr... any()': bool
+188..203 'let 1.0 = any()': bool
-191..195 'true': bool
+192..195 '1.0': f64
-191..195 'true': bool
+192..195 '1.0': f64
-198..201 'any': fn any<bool>() -> bool
+198..201 'any': fn any<f64>() -> f64
-198..203 'any()': bool
+198..203 'any()': f64
 204..206 '{}': ()
+211..234 'if let...y() {}': ()
+214..231 'let 1f... any()': bool
+218..223 '1f128': f128
+218..223 '1f128': f128
+226..229 'any': fn any<f128>() -> f128
+226..231 'any()': f128
+232..234 '{}': ()
+239..261 'if let...y() {}': ()
+242..258 'let tr... any()': bool
+246..250 'true': bool
+246..250 'true': bool
+253..256 'any': fn any<bool>() -> bool
+253..258 'any()': bool
+259..261 '{}': ()
 "#]],
 );
 }
@@ -1999,3 +1999,45 @@ where
 "#,
 );
 }
+
+#[test]
+fn tait_async_stack_overflow_17199() {
+check_types(
+r#"
+//- minicore: fmt, future
+type Foo = impl core::fmt::Debug;
+
+async fn foo() -> Foo {
+()
+}
+
+async fn test() {
+let t = foo().await;
+// ^ impl Debug
+}
+"#,
+);
+}
+
+#[test]
+fn lifetime_params_move_param_defaults() {
+check_types(
+r#"
+pub struct Thing<'s, T = u32>;
+
+impl <'s> Thing<'s> {
+pub fn new() -> Thing<'s> {
+Thing
+//^^^^^ Thing<'?, u32>
+}
+}
+
+fn main() {
+let scope =
+//^^^^^ &'? Thing<'?, u32>
+&Thing::new();
+//^^^^^^^^^^^^ Thing<'?, u32>
+}
+"#,
+);
+}
@@ -397,8 +397,10 @@ fn infer_literals() {
 r##"
 fn test() {
 5i32;
+5f16;
 5f32;
 5f64;
+5f128;
 "hello";
 b"bytes";
 'c';
@@ -421,26 +423,28 @@ h";
 }
 "##,
 expect![[r##"
-18..478 '{ ... }': ()
+18..515 '{ ... }': ()
 32..36 '5i32': i32
-50..54 '5f32': f32
+50..54 '5f16': f16
-68..72 '5f64': f64
+68..72 '5f32': f32
-86..93 '"hello"': &'static str
+86..90 '5f64': f64
-107..115 'b"bytes"': &'static [u8; 5]
+104..109 '5f128': f128
-129..132 ''c'': char
+123..130 '"hello"': &'static str
-146..150 'b'b'': u8
+144..152 'b"bytes"': &'static [u8; 5]
-164..168 '3.14': f64
+166..169 ''c'': char
-182..186 '5000': i32
+183..187 'b'b'': u8
-200..205 'false': bool
+201..205 '3.14': f64
-219..223 'true': bool
+219..223 '5000': i32
-237..333 'r#" ... "#': &'static str
+237..242 'false': bool
-347..357 'br#"yolo"#': &'static [u8; 4]
+256..260 'true': bool
-375..376 'a': &'static [u8; 4]
+274..370 'r#" ... "#': &'static str
-379..403 'b"a\x2... c"': &'static [u8; 4]
+384..394 'br#"yolo"#': &'static [u8; 4]
-421..422 'b': &'static [u8; 4]
+412..413 'a': &'static [u8; 4]
-425..433 'br"g\ h"': &'static [u8; 4]
+416..440 'b"a\x2... c"': &'static [u8; 4]
-451..452 'c': &'static [u8; 6]
+458..459 'b': &'static [u8; 4]
-455..467 'br#"x"\"yb"#': &'static [u8; 6]
+462..470 'br"g\ h"': &'static [u8; 4]
+488..489 'c': &'static [u8; 6]
+492..504 'br#"x"\"yb"#': &'static [u8; 6]
 "##]],
 );
 }
@@ -4824,3 +4824,76 @@ fn foo() {
 "#,
 )
 }
+
+#[test]
+fn nested_impl_traits() {
+check_infer(
+r#"
+//- minicore: fn
+trait Foo {}
+
+trait Bar<T> {}
+
+trait Baz {
+type Assoc;
+}
+
+struct Qux<T> {
+qux: T,
+}
+
+struct S;
+
+impl Foo for S {}
+
+fn not_allowed1(f: impl Fn(impl Foo)) {
+let foo = S;
+f(foo);
+}
+
+// This caused stack overflow in #17498
+fn not_allowed2(f: impl Fn(&impl Foo)) {
+let foo = S;
+f(&foo);
+}
+
+fn not_allowed3(bar: impl Bar<impl Foo>) {}
+
+// This also caused stack overflow
+fn not_allowed4(bar: impl Bar<&impl Foo>) {}
+
+fn allowed1(baz: impl Baz<Assoc = impl Foo>) {}
+
+fn allowed2<'a>(baz: impl Baz<Assoc = &'a (impl Foo + 'a)>) {}
+
+fn allowed3(baz: impl Baz<Assoc = Qux<impl Foo>>) {}
+"#,
+expect![[r#"
+139..140 'f': impl Fn({unknown}) + ?Sized
+161..193 '{ ...oo); }': ()
+171..174 'foo': S
+177..178 'S': S
+184..185 'f': impl Fn({unknown}) + ?Sized
+184..190 'f(foo)': ()
+186..189 'foo': S
+251..252 'f': impl Fn(&'? {unknown}) + ?Sized
+274..307 '{ ...oo); }': ()
+284..287 'foo': S
+290..291 'S': S
+297..298 'f': impl Fn(&'? {unknown}) + ?Sized
+297..304 'f(&foo)': ()
+299..303 '&foo': &'? S
+300..303 'foo': S
+325..328 'bar': impl Bar<{unknown}> + ?Sized
+350..352 '{}': ()
+405..408 'bar': impl Bar<&'? {unknown}> + ?Sized
+431..433 '{}': ()
+447..450 'baz': impl Baz<Assoc = impl Foo + ?Sized> + ?Sized
+480..482 '{}': ()
+500..503 'baz': impl Baz<Assoc = &'a impl Foo + 'a + ?Sized> + ?Sized
+544..546 '{}': ()
+560..563 'baz': impl Baz<Assoc = Qux<impl Foo + ?Sized>> + ?Sized
+598..600 '{}': ()
+"#]],
+)
+}
@@ -157,8 +157,7 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(Tra
 let generic_params = db.generic_params(trait_.into());
 let trait_self = generic_params.trait_self_param();
 generic_params
-.where_predicates
+.where_predicates()
-.iter()
 .filter_map(|pred| match pred {
 WherePredicate::ForLifetime { target, bound, .. }
 | WherePredicate::TypeBound { target, bound } => {
@@ -452,7 +452,7 @@ impl HirDisplay for TypeOrConstParam {
 impl HirDisplay for TypeParam {
 fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
 let params = f.db.generic_params(self.id.parent());
-let param_data = &params.type_or_consts[self.id.local_id()];
+let param_data = &params[self.id.local_id()];
 let substs = TyBuilder::placeholder_subst(f.db, self.id.parent());
 let krate = self.id.parent().krate(f.db).id;
 let ty =
@@ -539,11 +539,10 @@ fn write_generic_params(
 f: &mut HirFormatter<'_>,
 ) -> Result<(), HirDisplayError> {
 let params = f.db.generic_params(def);
-if params.lifetimes.is_empty()
+if params.iter_lt().next().is_none()
-&& params.type_or_consts.iter().all(|it| it.1.const_param().is_none())
+&& params.iter_type_or_consts().all(|it| it.1.const_param().is_none())
 && params
-.type_or_consts
+.iter_type_or_consts()
-.iter()
 .filter_map(|it| it.1.type_param())
 .all(|param| !matches!(param.provenance, TypeParamProvenance::TypeParamList))
 {
|
||||||
f.write_str(", ")
|
f.write_str(", ")
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
for (_, lifetime) in params.lifetimes.iter() {
|
for (_, lifetime) in params.iter_lt() {
|
||||||
delim(f)?;
|
delim(f)?;
|
||||||
write!(f, "{}", lifetime.name.display(f.db.upcast()))?;
|
write!(f, "{}", lifetime.name.display(f.db.upcast()))?;
|
||||||
}
|
}
|
||||||
for (_, ty) in params.type_or_consts.iter() {
|
for (_, ty) in params.iter_type_or_consts() {
|
||||||
if let Some(name) = &ty.name() {
|
if let Some(name) = &ty.name() {
|
||||||
match ty {
|
match ty {
|
||||||
TypeOrConstParamData::TypeParamData(ty) => {
|
TypeOrConstParamData::TypeParamData(ty) => {
|
||||||
|
@ -612,11 +611,11 @@ fn write_where_clause(
|
||||||
}
|
}
|
||||||
|
|
||||||
fn has_disaplayable_predicates(params: &Interned<GenericParams>) -> bool {
|
fn has_disaplayable_predicates(params: &Interned<GenericParams>) -> bool {
|
||||||
params.where_predicates.iter().any(|pred| {
|
params.where_predicates().any(|pred| {
|
||||||
!matches!(
|
!matches!(
|
||||||
pred,
|
pred,
|
||||||
WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(id), .. }
|
WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(id), .. }
|
||||||
if params.type_or_consts[*id].name().is_none()
|
if params[*id].name().is_none()
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -631,13 +630,13 @@ fn write_where_predicates(
|
||||||
let is_unnamed_type_target =
|
let is_unnamed_type_target =
|
||||||
|params: &Interned<GenericParams>, target: &WherePredicateTypeTarget| {
|
|params: &Interned<GenericParams>, target: &WherePredicateTypeTarget| {
|
||||||
matches!(target,
|
matches!(target,
|
||||||
WherePredicateTypeTarget::TypeOrConstParam(id) if params.type_or_consts[*id].name().is_none()
|
WherePredicateTypeTarget::TypeOrConstParam(id) if params[*id].name().is_none()
|
||||||
)
|
)
|
||||||
};
|
};
|
||||||
|
|
||||||
let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
|
let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
|
||||||
WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
|
WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
|
||||||
WherePredicateTypeTarget::TypeOrConstParam(id) => match params.type_or_consts[*id].name() {
|
WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() {
|
||||||
Some(name) => write!(f, "{}", name.display(f.db.upcast())),
|
Some(name) => write!(f, "{}", name.display(f.db.upcast())),
|
||||||
None => f.write_str("{unnamed}"),
|
None => f.write_str("{unnamed}"),
|
||||||
},
|
},
|
||||||
|
@ -653,7 +652,7 @@ fn write_where_predicates(
|
||||||
_ => false,
|
_ => false,
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut iter = params.where_predicates.iter().peekable();
|
let mut iter = params.where_predicates().peekable();
|
||||||
while let Some(pred) = iter.next() {
|
while let Some(pred) = iter.next() {
|
||||||
if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(params, target)) {
|
if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(params, target)) {
|
||||||
continue;
|
continue;
|
||||||
|
|
|
@@ -182,7 +182,6 @@ impl From<GenericDef> for GenericDefId {
 GenericDef::TraitAlias(it) => GenericDefId::TraitAliasId(it.id),
 GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
 GenericDef::Impl(it) => GenericDefId::ImplId(it.id),
-GenericDef::Variant(it) => GenericDefId::EnumVariantId(it.into()),
 GenericDef::Const(it) => GenericDefId::ConstId(it.id),
 }
 }
|
||||||
GenericDefId::TraitAliasId(it) => GenericDef::TraitAlias(it.into()),
|
GenericDefId::TraitAliasId(it) => GenericDef::TraitAlias(it.into()),
|
||||||
GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
|
GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
|
||||||
GenericDefId::ImplId(it) => GenericDef::Impl(it.into()),
|
GenericDefId::ImplId(it) => GenericDef::Impl(it.into()),
|
||||||
GenericDefId::EnumVariantId(it) => GenericDef::Variant(it.into()),
|
|
||||||
GenericDefId::ConstId(it) => GenericDef::Const(it.into()),
|
GenericDefId::ConstId(it) => GenericDef::Const(it.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -5,10 +5,10 @@ use either::Either;
 use hir_def::{
 nameres::{ModuleOrigin, ModuleSource},
 src::{HasChildSource, HasSource as _},
-Lookup, MacroId, VariantId,
+CallableDefId, Lookup, MacroId, VariantId,
 };
 use hir_expand::{HirFileId, InFile};
-use hir_ty::{db::InternedClosure, CallableDefId};
+use hir_ty::db::InternedClosure;
 use syntax::ast;
 use tt::TextRange;
 
@@ -17,7 +17,6 @@
 //! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
 //! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
 
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 #![recursion_limit = "512"]
 
@@ -52,11 +51,11 @@ use hir_def::{
 path::ImportAlias,
 per_ns::PerNs,
 resolver::{HasResolver, Resolver},
-AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
+AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, CrateRootModuleId,
-EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule,
+DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId,
-ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander,
+HasModule, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup,
-ModuleId, StaticId, StructId, TraitAliasId, TraitId, TupleId, TypeAliasId, TypeOrConstParamId,
+MacroExpander, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TupleId, TypeAliasId,
-TypeParamId, UnionId,
+TypeOrConstParamId, TypeParamId, UnionId,
 };
 use hir_expand::{
 attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, AstId, MacroCallKind, ValueResult,
@@ -71,7 +70,7 @@ use hir_ty::{
 mir::{interpret_mir, MutBorrowKind},
 primitive::UintTy,
 traits::FnTrait,
-AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
+AliasTy, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
 GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
 TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId,
 WhereClause,
@@ -666,7 +665,7 @@ impl Module {
 }
 let parent = impl_def.id.into();
 let generic_params = db.generic_params(parent);
-let lifetime_params = generic_params.lifetimes.iter().map(|(local_id, _)| {
+let lifetime_params = generic_params.iter_lt().map(|(local_id, _)| {
 GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
 });
 let type_params = generic_params
@@ -760,7 +759,7 @@ impl Module {
 impl_assoc_items_scratch.clear();
 }
 
-for &item in &db.impl_data(impl_def.id).items {
+for &item in db.impl_data(impl_def.id).items.iter() {
 AssocItem::from(item).diagnostics(db, acc, style_lints);
 }
 }
|
||||||
let generic_def_id: GenericDefId = match self.parent {
|
let generic_def_id: GenericDefId = match self.parent {
|
||||||
VariantDef::Struct(it) => it.id.into(),
|
VariantDef::Struct(it) => it.id.into(),
|
||||||
VariantDef::Union(it) => it.id.into(),
|
VariantDef::Union(it) => it.id.into(),
|
||||||
VariantDef::Variant(it) => it.id.into(),
|
VariantDef::Variant(it) => it.id.lookup(db.upcast()).parent.into(),
|
||||||
};
|
};
|
||||||
let substs = TyBuilder::placeholder_subst(db, generic_def_id);
|
let substs = TyBuilder::placeholder_subst(db, generic_def_id);
|
||||||
let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
|
let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
|
||||||
|
@ -1177,7 +1176,9 @@ impl Field {
|
||||||
db.layout_of_ty(
|
db.layout_of_ty(
|
||||||
self.ty(db).ty,
|
self.ty(db).ty,
|
||||||
db.trait_environment(match hir_def::VariantId::from(self.parent) {
|
db.trait_environment(match hir_def::VariantId::from(self.parent) {
|
||||||
hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id),
|
hir_def::VariantId::EnumVariantId(id) => {
|
||||||
|
GenericDefId::AdtId(id.lookup(db.upcast()).parent.into())
|
||||||
|
}
|
||||||
hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()),
|
hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()),
|
||||||
hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()),
|
hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()),
|
||||||
}),
|
}),
|
||||||
|
@@ -1539,8 +1540,7 @@ impl Adt {
  resolver
  .generic_params()
  .and_then(|gp| {
- gp.lifetimes
- .iter()
+ gp.iter_lt()
  // there should only be a single lifetime
  // but `Arena` requires to use an iterator
  .nth(0)
@@ -2501,9 +2501,8 @@ impl Trait {
  db: &dyn HirDatabase,
  count_required_only: bool,
  ) -> usize {
- db.generic_params(GenericDefId::from(self.id))
- .type_or_consts
- .iter()
+ db.generic_params(self.id.into())
+ .iter_type_or_consts()
  .filter(|(_, ty)| !matches!(ty, TypeOrConstParamData::TypeParamData(ty) if ty.provenance != TypeParamProvenance::TypeParamList))
  .filter(|(_, ty)| !count_required_only || !ty.has_default())
  .count()
@@ -2623,6 +2622,13 @@ impl BuiltinType {
  matches!(self.inner, hir_def::builtin_type::BuiltinType::Float(_))
  }

+ pub fn is_f16(&self) -> bool {
+ matches!(
+ self.inner,
+ hir_def::builtin_type::BuiltinType::Float(hir_def::builtin_type::BuiltinFloat::F16)
+ )
+ }
+
  pub fn is_f32(&self) -> bool {
  matches!(
  self.inner,
@@ -2637,6 +2643,13 @@ impl BuiltinType {
  )
  }

+ pub fn is_f128(&self) -> bool {
+ matches!(
+ self.inner,
+ hir_def::builtin_type::BuiltinType::Float(hir_def::builtin_type::BuiltinFloat::F128)
+ )
+ }
+
  pub fn is_char(&self) -> bool {
  matches!(self.inner, hir_def::builtin_type::BuiltinType::Char)
  }
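The two hunks above add `is_f16`/`is_f128` predicates alongside the existing `is_f32`. A minimal stand-alone sketch of the same `matches!`-with-nested-pattern style, using stand-in enums rather than the real `hir_def::builtin_type` items:

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum BuiltinFloat { F16, F32, F64, F128 }

#[derive(Clone, Copy)]
enum BuiltinType { Char, Float(BuiltinFloat) }

// Same shape as the new helpers: a nested path pattern inside `matches!`.
fn is_f16(ty: BuiltinType) -> bool {
    matches!(ty, BuiltinType::Float(BuiltinFloat::F16))
}

fn main() {
    assert!(is_f16(BuiltinType::Float(BuiltinFloat::F16)));
    assert!(!is_f16(BuiltinType::Float(BuiltinFloat::F64)));
    assert!(!is_f16(BuiltinType::Char));
}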
@@ -3107,9 +3120,6 @@ pub enum GenericDef {
  TraitAlias(TraitAlias),
  TypeAlias(TypeAlias),
  Impl(Impl),
- // enum variants cannot have generics themselves, but their parent enums
- // can, and this makes some code easier to write
- Variant(Variant),
  // consts can have type parameters from their parents (i.e. associated consts of traits)
  Const(Const),
  }
@@ -3120,7 +3130,6 @@ impl_from!(
  TraitAlias,
  TypeAlias,
  Impl,
- Variant,
  Const
  for GenericDef
  );
@@ -3128,7 +3137,7 @@ impl_from!(
  impl GenericDef {
  pub fn params(self, db: &dyn HirDatabase) -> Vec<GenericParam> {
  let generics = db.generic_params(self.into());
- let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| {
+ let ty_params = generics.iter_type_or_consts().map(|(local_id, _)| {
  let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } };
  match toc.split(db) {
  Either::Left(it) => GenericParam::ConstParam(it),
@@ -3145,8 +3154,7 @@ impl GenericDef {
  pub fn lifetime_params(self, db: &dyn HirDatabase) -> Vec<LifetimeParam> {
  let generics = db.generic_params(self.into());
  generics
- .lifetimes
- .iter()
+ .iter_lt()
  .map(|(local_id, _)| LifetimeParam {
  id: LifetimeParamId { parent: self.into(), local_id },
  })
@@ -3156,8 +3164,7 @@ impl GenericDef {
  pub fn type_or_const_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
  let generics = db.generic_params(self.into());
  generics
- .type_or_consts
- .iter()
+ .iter_type_or_consts()
  .map(|(local_id, _)| TypeOrConstParam {
  id: TypeOrConstParamId { parent: self.into(), local_id },
  })
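Several hunks in this file swap direct arena-field access (`.lifetimes.iter()`, `.type_or_consts.iter()`) for accessor methods (`iter_lt()`, `iter_type_or_consts()`) and plain indexing. A simplified, hypothetical stand-in showing the same encapsulation pattern (not the real `hir_def::generics` types):

use std::ops::Index;

struct GenericParams {
    // Stand-in for the arena of lifetime parameters.
    lifetimes: Vec<String>,
}

impl GenericParams {
    // Callers iterate through an accessor instead of touching the field.
    fn iter_lt(&self) -> impl Iterator<Item = (usize, &str)> + '_ {
        self.lifetimes.iter().enumerate().map(|(i, n)| (i, n.as_str()))
    }
}

// Indexing by the local id replaces `params.lifetimes[id]`.
impl Index<usize> for GenericParams {
    type Output = String;
    fn index(&self, id: usize) -> &String {
        &self.lifetimes[id]
    }
}

fn main() {
    let params = GenericParams { lifetimes: vec!["'a".into(), "'b".into()] };
    let names: Vec<&str> = params.iter_lt().map(|(_, name)| name).collect();
    assert_eq!(names, ["'a", "'b"]);
    assert_eq!(params[1], "'b");
}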
|
@ -3499,7 +3506,7 @@ impl TypeParam {
|
||||||
/// argument)?
|
/// argument)?
|
||||||
pub fn is_implicit(self, db: &dyn HirDatabase) -> bool {
|
pub fn is_implicit(self, db: &dyn HirDatabase) -> bool {
|
||||||
let params = db.generic_params(self.id.parent());
|
let params = db.generic_params(self.id.parent());
|
||||||
let data = ¶ms.type_or_consts[self.id.local_id()];
|
let data = ¶ms[self.id.local_id()];
|
||||||
match data.type_param().unwrap().provenance {
|
match data.type_param().unwrap().provenance {
|
||||||
hir_def::generics::TypeParamProvenance::TypeParamList => false,
|
hir_def::generics::TypeParamProvenance::TypeParamList => false,
|
||||||
hir_def::generics::TypeParamProvenance::TraitSelf
|
hir_def::generics::TypeParamProvenance::TraitSelf
|
||||||
|
@ -3553,7 +3560,7 @@ pub struct LifetimeParam {
|
||||||
impl LifetimeParam {
|
impl LifetimeParam {
|
||||||
pub fn name(self, db: &dyn HirDatabase) -> Name {
|
pub fn name(self, db: &dyn HirDatabase) -> Name {
|
||||||
let params = db.generic_params(self.id.parent);
|
let params = db.generic_params(self.id.parent);
|
||||||
params.lifetimes[self.id.local_id].name.clone()
|
params[self.id.local_id].name.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn module(self, db: &dyn HirDatabase) -> Module {
|
pub fn module(self, db: &dyn HirDatabase) -> Module {
|
||||||
|
@ -3577,7 +3584,7 @@ impl ConstParam {
|
||||||
|
|
||||||
pub fn name(self, db: &dyn HirDatabase) -> Name {
|
pub fn name(self, db: &dyn HirDatabase) -> Name {
|
||||||
let params = db.generic_params(self.id.parent());
|
let params = db.generic_params(self.id.parent());
|
||||||
match params.type_or_consts[self.id.local_id()].name() {
|
match params[self.id.local_id()].name() {
|
||||||
Some(it) => it.clone(),
|
Some(it) => it.clone(),
|
||||||
None => {
|
None => {
|
||||||
never!();
|
never!();
|
||||||
|
@ -3605,9 +3612,9 @@ impl ConstParam {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn generic_arg_from_param(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<GenericArg> {
|
fn generic_arg_from_param(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<GenericArg> {
|
||||||
let params = db.generic_defaults(id.parent);
|
|
||||||
let local_idx = hir_ty::param_idx(db, id)?;
|
let local_idx = hir_ty::param_idx(db, id)?;
|
||||||
let ty = params.get(local_idx)?.clone();
|
let defaults = db.generic_defaults(id.parent);
|
||||||
|
let ty = defaults.get(local_idx)?.clone();
|
||||||
let subst = TyBuilder::placeholder_subst(db, id.parent);
|
let subst = TyBuilder::placeholder_subst(db, id.parent);
|
||||||
Some(ty.substitute(Interner, &subst))
|
Some(ty.substitute(Interner, &subst))
|
||||||
}
|
}
|
||||||
|
@ -3620,7 +3627,7 @@ pub struct TypeOrConstParam {
|
||||||
impl TypeOrConstParam {
|
impl TypeOrConstParam {
|
||||||
pub fn name(self, db: &dyn HirDatabase) -> Name {
|
pub fn name(self, db: &dyn HirDatabase) -> Name {
|
||||||
let params = db.generic_params(self.id.parent);
|
let params = db.generic_params(self.id.parent);
|
||||||
match params.type_or_consts[self.id.local_id].name() {
|
match params[self.id.local_id].name() {
|
||||||
Some(n) => n.clone(),
|
Some(n) => n.clone(),
|
||||||
_ => Name::missing(),
|
_ => Name::missing(),
|
||||||
}
|
}
|
||||||
|
@ -3636,7 +3643,7 @@ impl TypeOrConstParam {
|
||||||
|
|
||||||
pub fn split(self, db: &dyn HirDatabase) -> Either<ConstParam, TypeParam> {
|
pub fn split(self, db: &dyn HirDatabase) -> Either<ConstParam, TypeParam> {
|
||||||
let params = db.generic_params(self.id.parent);
|
let params = db.generic_params(self.id.parent);
|
||||||
match ¶ms.type_or_consts[self.id.local_id] {
|
match ¶ms[self.id.local_id] {
|
||||||
hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
|
hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
|
||||||
Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) })
|
Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) })
|
||||||
}
|
}
|
||||||
|
@ -3655,7 +3662,7 @@ impl TypeOrConstParam {
|
||||||
|
|
||||||
pub fn as_type_param(self, db: &dyn HirDatabase) -> Option<TypeParam> {
|
pub fn as_type_param(self, db: &dyn HirDatabase) -> Option<TypeParam> {
|
||||||
let params = db.generic_params(self.id.parent);
|
let params = db.generic_params(self.id.parent);
|
||||||
match ¶ms.type_or_consts[self.id.local_id] {
|
match ¶ms[self.id.local_id] {
|
||||||
hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
|
hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
|
||||||
Some(TypeParam { id: TypeParamId::from_unchecked(self.id) })
|
Some(TypeParam { id: TypeParamId::from_unchecked(self.id) })
|
||||||
}
|
}
|
||||||
|
@ -3665,7 +3672,7 @@ impl TypeOrConstParam {
|
||||||
|
|
||||||
pub fn as_const_param(self, db: &dyn HirDatabase) -> Option<ConstParam> {
|
pub fn as_const_param(self, db: &dyn HirDatabase) -> Option<ConstParam> {
|
||||||
let params = db.generic_params(self.id.parent);
|
let params = db.generic_params(self.id.parent);
|
||||||
match ¶ms.type_or_consts[self.id.local_id] {
|
match ¶ms[self.id.local_id] {
|
||||||
hir_def::generics::TypeOrConstParamData::TypeParamData(_) => None,
|
hir_def::generics::TypeOrConstParamData::TypeParamData(_) => None,
|
||||||
hir_def::generics::TypeOrConstParamData::ConstParamData(_) => {
|
hir_def::generics::TypeOrConstParamData::ConstParamData(_) => {
|
||||||
Some(ConstParam { id: ConstParamId::from_unchecked(self.id) })
|
Some(ConstParam { id: ConstParamId::from_unchecked(self.id) })
|
||||||
|
@ -4052,7 +4059,9 @@ impl Type {
|
||||||
ValueTyDefId::FunctionId(it) => GenericDefId::FunctionId(it),
|
ValueTyDefId::FunctionId(it) => GenericDefId::FunctionId(it),
|
||||||
ValueTyDefId::StructId(it) => GenericDefId::AdtId(AdtId::StructId(it)),
|
ValueTyDefId::StructId(it) => GenericDefId::AdtId(AdtId::StructId(it)),
|
||||||
ValueTyDefId::UnionId(it) => GenericDefId::AdtId(AdtId::UnionId(it)),
|
ValueTyDefId::UnionId(it) => GenericDefId::AdtId(AdtId::UnionId(it)),
|
||||||
ValueTyDefId::EnumVariantId(it) => GenericDefId::EnumVariantId(it),
|
ValueTyDefId::EnumVariantId(it) => {
|
||||||
|
GenericDefId::AdtId(AdtId::EnumId(it.lookup(db.upcast()).parent))
|
||||||
|
}
|
||||||
ValueTyDefId::StaticId(_) => return Type::new(db, def, ty.skip_binders().clone()),
|
ValueTyDefId::StaticId(_) => return Type::new(db, def, ty.skip_binders().clone()),
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
|
@@ -68,38 +68,44 @@ impl SourceAnalyzer {
  pub(crate) fn new_for_body(
  db: &dyn HirDatabase,
  def: DefWithBodyId,
- node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ node: InFile<&SyntaxNode>,
  offset: Option<TextSize>,
  ) -> SourceAnalyzer {
- let (body, source_map) = db.body_with_source_map(def);
- let scopes = db.expr_scopes(def);
- let scope = match offset {
- None => scope_for(&scopes, &source_map, node),
- Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
- };
- let resolver = resolver_for_scope(db.upcast(), def, scope);
- SourceAnalyzer {
- resolver,
- def: Some((def, body, source_map)),
- infer: Some(db.infer(def)),
- file_id,
- }
+ Self::new_for_body_(db, def, node, offset, Some(db.infer(def)))
  }

  pub(crate) fn new_for_body_no_infer(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ Self::new_for_body_(db, def, node, offset, None)
+ }
+
+ pub(crate) fn new_for_body_(
  db: &dyn HirDatabase,
  def: DefWithBodyId,
  node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
  offset: Option<TextSize>,
+ infer: Option<Arc<InferenceResult>>,
  ) -> SourceAnalyzer {
  let (body, source_map) = db.body_with_source_map(def);
  let scopes = db.expr_scopes(def);
  let scope = match offset {
- None => scope_for(&scopes, &source_map, node),
+ None => scope_for(db, &scopes, &source_map, node),
- Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ Some(offset) => {
+ debug_assert!(
+ node.text_range().contains_inclusive(offset),
+ "{:?} not in {:?}",
+ offset,
+ node.text_range()
+ );
+ scope_for_offset(db, &scopes, &source_map, node.file_id, offset)
+ }
  };
  let resolver = resolver_for_scope(db.upcast(), def, scope);
- SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer: None, file_id }
+ SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer, file_id }
  }

  pub(crate) fn new_for_resolver(
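The hunk above collapses `new_for_body` and `new_for_body_no_infer` into thin wrappers around a shared `new_for_body_` that receives the optional `Option<Arc<InferenceResult>>`. A stripped-down sketch of that shape, with stand-in types in place of the real ones:

use std::sync::Arc;

struct InferenceResult; // stand-in for the real inference data
struct SourceAnalyzer {
    infer: Option<Arc<InferenceResult>>,
}

impl SourceAnalyzer {
    fn new_for_body(infer: InferenceResult) -> Self {
        Self::new_for_body_(Some(Arc::new(infer)))
    }

    fn new_for_body_no_infer() -> Self {
        Self::new_for_body_(None)
    }

    // The single place that does the actual construction work.
    fn new_for_body_(infer: Option<Arc<InferenceResult>>) -> Self {
        SourceAnalyzer { infer }
    }
}

fn main() {
    assert!(SourceAnalyzer::new_for_body(InferenceResult).infer.is_some());
    assert!(SourceAnalyzer::new_for_body_no_infer().infer.is_none());
}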
|
@@ -662,7 +668,6 @@ impl SourceAnalyzer {
  return resolved;
  }

- // This must be a normal source file rather than macro file.
  let ctx = LowerCtx::new(db.upcast(), self.file_id);
  let hir_path = Path::from_src(&ctx, path.clone())?;

@@ -955,14 +960,15 @@ impl SourceAnalyzer {
  }

  fn scope_for(
+ db: &dyn HirDatabase,
  scopes: &ExprScopes,
  source_map: &BodySourceMap,
  node: InFile<&SyntaxNode>,
  ) -> Option<ScopeId> {
- node.value
- .ancestors()
- .filter_map(ast::Expr::cast)
- .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it)))
+ node.ancestors_with_macros(db.upcast())
+ .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind()))
+ .filter_map(|it| it.map(ast::Expr::cast).transpose())
+ .filter_map(|it| source_map.node_expr(it.as_ref()))
  .find_map(|it| scopes.scope_for(it))
  }

|
@@ -988,8 +994,8 @@ fn scope_for_offset(
  Some(it.file_id.macro_file()?.call_node(db.upcast()))
  })
  .find(|it| it.file_id == from_file)
- .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
- Some((source.value.text_range(), scope))
+ .filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?;
+ Some((source.text_range(), scope))
  })
  .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
  // find containing scope
|
|
|
@@ -231,7 +231,7 @@ impl<'a> SymbolCollector<'a> {
  let impl_data = self.db.impl_data(impl_id);
  let impl_name = Some(SmolStr::new(impl_data.self_ty.display(self.db).to_string()));
  self.with_container_name(impl_name, |s| {
- for &assoc_item_id in &impl_data.items {
+ for &assoc_item_id in impl_data.items.iter() {
  s.push_assoc_item(assoc_item_id)
  }
  })

@@ -93,12 +93,6 @@ struct LookupTable
  data: FxHashMap<Type, AlternativeExprs>,
  /// New types reached since last query by the `NewTypesKey`
  new_types: FxHashMap<NewTypesKey, Vec<Type>>,
- /// ScopeDefs that are not interesting any more
- exhausted_scopedefs: FxHashSet<ScopeDef>,
- /// ScopeDefs that were used in current round
- round_scopedef_hits: FxHashSet<ScopeDef>,
- /// Amount of rounds since scopedef was first used.
- rounds_since_sopedef_hit: FxHashMap<ScopeDef, u32>,
  /// Types queried but not present
  types_wishlist: FxHashSet<Type>,
  /// Threshold to squash trees to `Many`
@@ -212,37 +206,6 @@ impl LookupTable
  }
  }
-
- /// Mark `ScopeDef` as exhausted meaning it is not interesting for us any more
- fn mark_exhausted(&mut self, def: ScopeDef) {
- self.exhausted_scopedefs.insert(def);
- }
-
- /// Mark `ScopeDef` as used meaning we managed to produce something useful from it
- fn mark_fulfilled(&mut self, def: ScopeDef) {
- self.round_scopedef_hits.insert(def);
- }
-
- /// Start new round (meant to be called at the beginning of iteration in `term_search`)
- ///
- /// This functions marks some `ScopeDef`s as exhausted if there have been
- /// `MAX_ROUNDS_AFTER_HIT` rounds after first using a `ScopeDef`.
- fn new_round(&mut self) {
- for def in &self.round_scopedef_hits {
- let hits =
- self.rounds_since_sopedef_hit.entry(*def).and_modify(|n| *n += 1).or_insert(0);
- const MAX_ROUNDS_AFTER_HIT: u32 = 2;
- if *hits > MAX_ROUNDS_AFTER_HIT {
- self.exhausted_scopedefs.insert(*def);
- }
- }
- self.round_scopedef_hits.clear();
- }
-
- /// Get exhausted `ScopeDef`s
- fn exhausted_scopedefs(&self) -> &FxHashSet<ScopeDef> {
- &self.exhausted_scopedefs
- }
-
  /// Types queried but not found
  fn types_wishlist(&mut self) -> &FxHashSet<Type> {
  &self.types_wishlist
@@ -275,7 +238,7 @@ pub struct TermSearchConfig

  impl Default for TermSearchConfig {
  fn default() -> Self {
- Self { enable_borrowcheck: true, many_alternatives_threshold: 1, fuel: 400 }
+ Self { enable_borrowcheck: true, many_alternatives_threshold: 1, fuel: 1200 }
  }
  }

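The default term-search fuel rises from 400 to 1200 here. A hedged sketch (not the actual implementation) of how a fuel budget can back the `should_continue` closures that the tactics receive:

use std::cell::Cell;

fn main() {
    let fuel = Cell::new(1200u64); // assumed budget, mirroring the new default
    let should_continue = || {
        let left = fuel.get();
        fuel.set(left.saturating_sub(1));
        left > 0
    };

    let mut rounds = 0;
    while should_continue() {
        rounds += 1;
    }
    assert_eq!(rounds, 1200);
}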
@@ -328,19 +291,12 @@ pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
  solutions.extend(tactics::assoc_const(ctx, &defs, &mut lookup));

  while should_continue() {
- lookup.new_round();
-
  solutions.extend(tactics::data_constructor(ctx, &defs, &mut lookup, should_continue));
  solutions.extend(tactics::free_function(ctx, &defs, &mut lookup, should_continue));
  solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup, should_continue));
  solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup, should_continue));
  solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup, should_continue));
  solutions.extend(tactics::make_tuple(ctx, &defs, &mut lookup, should_continue));
-
- // Discard not interesting `ScopeDef`s for speedup
- for def in lookup.exhausted_scopedefs() {
- defs.remove(def);
- }
  }

  solutions.into_iter().filter(|it| !it.is_many()).unique().collect()
|
|
|
@ -9,8 +9,8 @@ use hir_ty::{
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, GenericDef, Local,
|
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, Local, ModuleDef,
|
||||||
ModuleDef, SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant,
|
SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Helper function to get path to `ModuleDef`
|
/// Helper function to get path to `ModuleDef`
|
||||||
|
@ -35,43 +35,6 @@ fn mod_item_path_str(
|
||||||
.ok_or(DisplaySourceCodeError::PathNotFound)
|
.ok_or(DisplaySourceCodeError::PathNotFound)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Helper function to get path to `Type`
|
|
||||||
fn type_path(
|
|
||||||
sema_scope: &SemanticsScope<'_>,
|
|
||||||
ty: &Type,
|
|
||||||
cfg: ImportPathConfig,
|
|
||||||
) -> Result<String, DisplaySourceCodeError> {
|
|
||||||
let db = sema_scope.db;
|
|
||||||
let m = sema_scope.module();
|
|
||||||
|
|
||||||
match ty.as_adt() {
|
|
||||||
Some(adt) => {
|
|
||||||
let ty_name = ty.display_source_code(db, m.id, true)?;
|
|
||||||
|
|
||||||
let mut path = mod_item_path(sema_scope, &ModuleDef::Adt(adt), cfg).unwrap();
|
|
||||||
path.pop_segment();
|
|
||||||
let path = path.display(db.upcast()).to_string();
|
|
||||||
let res = match path.is_empty() {
|
|
||||||
true => ty_name,
|
|
||||||
false => format!("{path}::{ty_name}"),
|
|
||||||
};
|
|
||||||
Ok(res)
|
|
||||||
}
|
|
||||||
None => ty.display_source_code(db, m.id, true),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Helper function to filter out generic parameters that are default
|
|
||||||
fn non_default_generics(db: &dyn HirDatabase, def: GenericDef, generics: &[Type]) -> Vec<Type> {
|
|
||||||
def.type_or_const_params(db)
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|it| it.as_type_param(db))
|
|
||||||
.zip(generics)
|
|
||||||
.filter(|(tp, arg)| tp.default(db).as_ref() != Some(arg))
|
|
||||||
.map(|(_, arg)| arg.clone())
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Type tree shows how can we get from set of types to some type.
|
/// Type tree shows how can we get from set of types to some type.
|
||||||
///
|
///
|
||||||
/// Consider the following code as an example
|
/// Consider the following code as an example
|
||||||
|
@ -208,20 +171,7 @@ impl Expr {
|
||||||
None => Ok(format!("{target_str}.{func_name}({args})")),
|
None => Ok(format!("{target_str}.{func_name}({args})")),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Expr::Variant { variant, generics, params } => {
|
Expr::Variant { variant, params, .. } => {
|
||||||
let generics = non_default_generics(db, (*variant).into(), generics);
|
|
||||||
let generics_str = match generics.is_empty() {
|
|
||||||
true => String::new(),
|
|
||||||
false => {
|
|
||||||
let generics = generics
|
|
||||||
.iter()
|
|
||||||
.map(|it| type_path(sema_scope, it, cfg))
|
|
||||||
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
|
||||||
.into_iter()
|
|
||||||
.join(", ");
|
|
||||||
format!("::<{generics}>")
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let inner = match variant.kind(db) {
|
let inner = match variant.kind(db) {
|
||||||
StructKind::Tuple => {
|
StructKind::Tuple => {
|
||||||
let args = params
|
let args = params
|
||||||
|
@ -230,7 +180,7 @@ impl Expr {
|
||||||
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
format!("{generics_str}({args})")
|
format!("({args})")
|
||||||
}
|
}
|
||||||
StructKind::Record => {
|
StructKind::Record => {
|
||||||
let fields = variant.fields(db);
|
let fields = variant.fields(db);
|
||||||
|
@ -248,16 +198,15 @@ impl Expr {
|
||||||
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
format!("{generics_str}{{ {args} }}")
|
format!("{{ {args} }}")
|
||||||
}
|
}
|
||||||
StructKind::Unit => generics_str,
|
StructKind::Unit => String::new(),
|
||||||
};
|
};
|
||||||
|
|
||||||
let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant))?;
|
let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant))?;
|
||||||
Ok(format!("{prefix}{inner}"))
|
Ok(format!("{prefix}{inner}"))
|
||||||
}
|
}
|
||||||
Expr::Struct { strukt, generics, params } => {
|
Expr::Struct { strukt, params, .. } => {
|
||||||
let generics = non_default_generics(db, (*strukt).into(), generics);
|
|
||||||
let inner = match strukt.kind(db) {
|
let inner = match strukt.kind(db) {
|
||||||
StructKind::Tuple => {
|
StructKind::Tuple => {
|
||||||
let args = params
|
let args = params
|
||||||
|
@ -286,18 +235,7 @@ impl Expr {
|
||||||
.join(", ");
|
.join(", ");
|
||||||
format!(" {{ {args} }}")
|
format!(" {{ {args} }}")
|
||||||
}
|
}
|
||||||
StructKind::Unit => match generics.is_empty() {
|
StructKind::Unit => String::new(),
|
||||||
true => String::new(),
|
|
||||||
false => {
|
|
||||||
let generics = generics
|
|
||||||
.iter()
|
|
||||||
.map(|it| type_path(sema_scope, it, cfg))
|
|
||||||
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
|
|
||||||
.into_iter()
|
|
||||||
.join(", ");
|
|
||||||
format!("::<{generics}>")
|
|
||||||
}
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)))?;
|
let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)))?;
|
||||||
|
|
|
@ -17,11 +17,11 @@ use itertools::Itertools;
|
||||||
use rustc_hash::FxHashSet;
|
use rustc_hash::FxHashSet;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
Adt, AssocItem, Enum, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type,
|
Adt, AssocItem, GenericDef, GenericParam, HasAttrs, HasVisibility, Impl, ModuleDef, ScopeDef,
|
||||||
TypeParam, Variant,
|
Type, TypeParam,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::term_search::{Expr, TermSearchConfig};
|
use crate::term_search::Expr;
|
||||||
|
|
||||||
use super::{LookupTable, NewTypesKey, TermSearchCtx};
|
use super::{LookupTable, NewTypesKey, TermSearchCtx};
|
||||||
|
|
||||||
|
@ -74,8 +74,6 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
|
||||||
_ => None,
|
_ => None,
|
||||||
}?;
|
}?;
|
||||||
|
|
||||||
lookup.mark_exhausted(*def);
|
|
||||||
|
|
||||||
let ty = expr.ty(db);
|
let ty = expr.ty(db);
|
||||||
lookup.insert(ty.clone(), std::iter::once(expr.clone()));
|
lookup.insert(ty.clone(), std::iter::once(expr.clone()));
|
||||||
|
|
||||||
|
@ -124,6 +122,10 @@ pub(super) fn assoc_const<'a, DB: HirDatabase>(
|
||||||
.filter(move |it| it.is_visible_from(db, module))
|
.filter(move |it| it.is_visible_from(db, module))
|
||||||
.filter_map(AssocItem::as_const)
|
.filter_map(AssocItem::as_const)
|
||||||
.filter_map(|it| {
|
.filter_map(|it| {
|
||||||
|
if it.attrs(db).is_unstable() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
let expr = Expr::Const(it);
|
let expr = Expr::Const(it);
|
||||||
let ty = it.ty(db);
|
let ty = it.ty(db);
|
||||||
|
|
||||||
|
@ -151,163 +153,27 @@ pub(super) fn assoc_const<'a, DB: HirDatabase>(
|
||||||
/// * `should_continue` - Function that indicates when to stop iterating
|
/// * `should_continue` - Function that indicates when to stop iterating
|
||||||
pub(super) fn data_constructor<'a, DB: HirDatabase>(
|
pub(super) fn data_constructor<'a, DB: HirDatabase>(
|
||||||
ctx: &'a TermSearchCtx<'a, DB>,
|
ctx: &'a TermSearchCtx<'a, DB>,
|
||||||
defs: &'a FxHashSet<ScopeDef>,
|
_defs: &'a FxHashSet<ScopeDef>,
|
||||||
lookup: &'a mut LookupTable,
|
lookup: &'a mut LookupTable,
|
||||||
should_continue: &'a dyn std::ops::Fn() -> bool,
|
should_continue: &'a dyn std::ops::Fn() -> bool,
|
||||||
) -> impl Iterator<Item = Expr> + 'a {
|
) -> impl Iterator<Item = Expr> + 'a {
|
||||||
let db = ctx.sema.db;
|
let db = ctx.sema.db;
|
||||||
let module = ctx.scope.module();
|
let module = ctx.scope.module();
|
||||||
fn variant_helper(
|
lookup
|
||||||
db: &dyn HirDatabase,
|
.types_wishlist()
|
||||||
lookup: &mut LookupTable,
|
.clone()
|
||||||
should_continue: &dyn std::ops::Fn() -> bool,
|
.into_iter()
|
||||||
parent_enum: Enum,
|
.chain(iter::once(ctx.goal.clone()))
|
||||||
variant: Variant,
|
.filter_map(|ty| ty.as_adt().map(|adt| (adt, ty)))
|
||||||
config: &TermSearchConfig,
|
.filter(|_| should_continue())
|
||||||
) -> Vec<(Type, Vec<Expr>)> {
|
.filter_map(move |(adt, ty)| match adt {
|
||||||
// Ignore unstable
|
Adt::Struct(strukt) => {
|
||||||
if variant.is_unstable(db) {
|
// Ignore unstable or not visible
|
||||||
return Vec::new();
|
if strukt.is_unstable(db) || !strukt.is_visible_from(db, module) {
|
||||||
}
|
|
||||||
|
|
||||||
let generics = GenericDef::from(variant.parent_enum(db));
|
|
||||||
let Some(type_params) = generics
|
|
||||||
.type_or_const_params(db)
|
|
||||||
.into_iter()
|
|
||||||
.map(|it| it.as_type_param(db))
|
|
||||||
.collect::<Option<Vec<TypeParam>>>()
|
|
||||||
else {
|
|
||||||
// Ignore enums with const generics
|
|
||||||
return Vec::new();
|
|
||||||
};
|
|
||||||
|
|
||||||
// We currently do not check lifetime bounds so ignore all types that have something to do
|
|
||||||
// with them
|
|
||||||
if !generics.lifetime_params(db).is_empty() {
|
|
||||||
return Vec::new();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Only account for stable type parameters for now, unstable params can be default
|
|
||||||
// tho, for example in `Box<T, #[unstable] A: Allocator>`
|
|
||||||
if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) {
|
|
||||||
return Vec::new();
|
|
||||||
}
|
|
||||||
|
|
||||||
let non_default_type_params_len =
|
|
||||||
type_params.iter().filter(|it| it.default(db).is_none()).count();
|
|
||||||
|
|
||||||
let enum_ty_shallow = Adt::from(parent_enum).ty(db);
|
|
||||||
let generic_params = lookup
|
|
||||||
.types_wishlist()
|
|
||||||
.clone()
|
|
||||||
.into_iter()
|
|
||||||
.filter(|ty| ty.could_unify_with(db, &enum_ty_shallow))
|
|
||||||
.map(|it| it.type_arguments().collect::<Vec<Type>>())
|
|
||||||
.chain((non_default_type_params_len == 0).then_some(Vec::new()));
|
|
||||||
|
|
||||||
generic_params
|
|
||||||
.filter(|_| should_continue())
|
|
||||||
.filter_map(move |generics| {
|
|
||||||
// Insert default type params
|
|
||||||
let mut g = generics.into_iter();
|
|
||||||
let generics: Vec<_> = type_params
|
|
||||||
.iter()
|
|
||||||
.map(|it| it.default(db).or_else(|| g.next()))
|
|
||||||
.collect::<Option<_>>()?;
|
|
||||||
|
|
||||||
let enum_ty = Adt::from(parent_enum).ty_with_args(db, generics.iter().cloned());
|
|
||||||
|
|
||||||
// Ignore types that have something to do with lifetimes
|
|
||||||
if config.enable_borrowcheck && enum_ty.contains_reference(db) {
|
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Early exit if some param cannot be filled from lookup
|
let generics = GenericDef::from(strukt);
|
||||||
let param_exprs: Vec<Vec<Expr>> = variant
|
|
||||||
.fields(db)
|
|
||||||
.into_iter()
|
|
||||||
.map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())))
|
|
||||||
.collect::<Option<_>>()?;
|
|
||||||
|
|
||||||
// Note that we need special case for 0 param constructors because of multi cartesian
|
|
||||||
// product
|
|
||||||
let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
|
|
||||||
vec![Expr::Variant { variant, generics, params: Vec::new() }]
|
|
||||||
} else {
|
|
||||||
param_exprs
|
|
||||||
.into_iter()
|
|
||||||
.multi_cartesian_product()
|
|
||||||
.map(|params| Expr::Variant { variant, generics: generics.clone(), params })
|
|
||||||
.collect()
|
|
||||||
};
|
|
||||||
lookup.insert(enum_ty.clone(), variant_exprs.iter().cloned());
|
|
||||||
|
|
||||||
Some((enum_ty, variant_exprs))
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
defs.iter()
|
|
||||||
.filter_map(move |def| match def {
|
|
||||||
ScopeDef::ModuleDef(ModuleDef::Variant(it)) => {
|
|
||||||
let variant_exprs = variant_helper(
|
|
||||||
db,
|
|
||||||
lookup,
|
|
||||||
should_continue,
|
|
||||||
it.parent_enum(db),
|
|
||||||
*it,
|
|
||||||
&ctx.config,
|
|
||||||
);
|
|
||||||
if variant_exprs.is_empty() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
if GenericDef::from(it.parent_enum(db))
|
|
||||||
.type_or_const_params(db)
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|it| it.as_type_param(db))
|
|
||||||
.all(|it| it.default(db).is_some())
|
|
||||||
{
|
|
||||||
lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it)));
|
|
||||||
}
|
|
||||||
Some(variant_exprs)
|
|
||||||
}
|
|
||||||
ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => {
|
|
||||||
let exprs: Vec<(Type, Vec<Expr>)> = enum_
|
|
||||||
.variants(db)
|
|
||||||
.into_iter()
|
|
||||||
.flat_map(|it| {
|
|
||||||
variant_helper(db, lookup, should_continue, *enum_, it, &ctx.config)
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
if exprs.is_empty() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
if GenericDef::from(*enum_)
|
|
||||||
.type_or_const_params(db)
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|it| it.as_type_param(db))
|
|
||||||
.all(|it| it.default(db).is_some())
|
|
||||||
{
|
|
||||||
lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_))));
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(exprs)
|
|
||||||
}
|
|
||||||
ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => {
|
|
||||||
// Ignore unstable and not visible
|
|
||||||
if it.is_unstable(db) || !it.is_visible_from(db, module) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
let generics = GenericDef::from(*it);
|
|
||||||
|
|
||||||
// Ignore const params for now
|
|
||||||
let type_params = generics
|
|
||||||
.type_or_const_params(db)
|
|
||||||
.into_iter()
|
|
||||||
.map(|it| it.as_type_param(db))
|
|
||||||
.collect::<Option<Vec<TypeParam>>>()?;
|
|
||||||
|
|
||||||
// We currently do not check lifetime bounds so ignore all types that have something to do
|
// We currently do not check lifetime bounds so ignore all types that have something to do
|
||||||
// with them
|
// with them
|
||||||
|
@ -315,48 +181,73 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Only account for stable type parameters for now, unstable params can be default
|
if ty.contains_unknown() {
|
||||||
// tho, for example in `Box<T, #[unstable] A: Allocator>`
|
|
||||||
if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) {
|
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
let non_default_type_params_len =
|
// Ignore types that have something to do with lifetimes
|
||||||
type_params.iter().filter(|it| it.default(db).is_none()).count();
|
if ctx.config.enable_borrowcheck && ty.contains_reference(db) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let fields = strukt.fields(db);
|
||||||
|
// Check if all fields are visible, otherwise we cannot fill them
|
||||||
|
if fields.iter().any(|it| !it.is_visible_from(db, module)) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
let struct_ty_shallow = Adt::from(*it).ty(db);
|
let generics: Vec<_> = ty.type_arguments().collect();
|
||||||
let generic_params = lookup
|
|
||||||
.types_wishlist()
|
// Early exit if some param cannot be filled from lookup
|
||||||
.clone()
|
let param_exprs: Vec<Vec<Expr>> = fields
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter(|ty| ty.could_unify_with(db, &struct_ty_shallow))
|
.map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())))
|
||||||
.map(|it| it.type_arguments().collect::<Vec<Type>>())
|
.collect::<Option<_>>()?;
|
||||||
.chain((non_default_type_params_len == 0).then_some(Vec::new()));
|
|
||||||
|
|
||||||
let exprs = generic_params
|
// Note that we need special case for 0 param constructors because of multi cartesian
|
||||||
.filter(|_| should_continue())
|
// product
|
||||||
.filter_map(|generics| {
|
let exprs: Vec<Expr> = if param_exprs.is_empty() {
|
||||||
// Insert default type params
|
vec![Expr::Struct { strukt, generics, params: Vec::new() }]
|
||||||
let mut g = generics.into_iter();
|
} else {
|
||||||
let generics: Vec<_> = type_params
|
param_exprs
|
||||||
.iter()
|
.into_iter()
|
||||||
.map(|it| it.default(db).or_else(|| g.next()))
|
.multi_cartesian_product()
|
||||||
.collect::<Option<_>>()?;
|
.map(|params| Expr::Struct { strukt, generics: generics.clone(), params })
|
||||||
|
.collect()
|
||||||
|
};
|
||||||
|
|
||||||
let struct_ty = Adt::from(*it).ty_with_args(db, generics.iter().cloned());
|
lookup.insert(ty.clone(), exprs.iter().cloned());
|
||||||
|
Some((ty, exprs))
|
||||||
|
}
|
||||||
|
Adt::Enum(enum_) => {
|
||||||
|
// Ignore unstable or not visible
|
||||||
|
if enum_.is_unstable(db) || !enum_.is_visible_from(db, module) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
// Ignore types that have something to do with lifetimes
|
let generics = GenericDef::from(enum_);
|
||||||
if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) {
|
// We currently do not check lifetime bounds so ignore all types that have something to do
|
||||||
return None;
|
// with them
|
||||||
}
|
if !generics.lifetime_params(db).is_empty() {
|
||||||
let fields = it.fields(db);
|
return None;
|
||||||
// Check if all fields are visible, otherwise we cannot fill them
|
}
|
||||||
if fields.iter().any(|it| !it.is_visible_from(db, module)) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
if ty.contains_unknown() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ignore types that have something to do with lifetimes
|
||||||
|
if ctx.config.enable_borrowcheck && ty.contains_reference(db) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let generics: Vec<_> = ty.type_arguments().collect();
|
||||||
|
let exprs = enum_
|
||||||
|
.variants(db)
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|variant| {
|
||||||
// Early exit if some param cannot be filled from lookup
|
// Early exit if some param cannot be filled from lookup
|
||||||
let param_exprs: Vec<Vec<Expr>> = fields
|
let param_exprs: Vec<Vec<Expr>> = variant
|
||||||
|
.fields(db)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|field| {
|
.map(|field| {
|
||||||
lookup.find(db, &field.ty_with_args(db, generics.iter().cloned()))
|
lookup.find(db, &field.ty_with_args(db, generics.iter().cloned()))
|
||||||
|
@ -365,36 +256,33 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
|
||||||
|
|
||||||
// Note that we need special case for 0 param constructors because of multi cartesian
|
// Note that we need special case for 0 param constructors because of multi cartesian
|
||||||
// product
|
// product
|
||||||
let struct_exprs: Vec<Expr> = if param_exprs.is_empty() {
|
let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
|
||||||
vec![Expr::Struct { strukt: *it, generics, params: Vec::new() }]
|
vec![Expr::Variant {
|
||||||
|
variant,
|
||||||
|
generics: generics.clone(),
|
||||||
|
params: Vec::new(),
|
||||||
|
}]
|
||||||
} else {
|
} else {
|
||||||
param_exprs
|
param_exprs
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.multi_cartesian_product()
|
.multi_cartesian_product()
|
||||||
.map(|params| Expr::Struct {
|
.map(|params| Expr::Variant {
|
||||||
strukt: *it,
|
variant,
|
||||||
generics: generics.clone(),
|
generics: generics.clone(),
|
||||||
params,
|
params,
|
||||||
})
|
})
|
||||||
.collect()
|
.collect()
|
||||||
};
|
};
|
||||||
|
lookup.insert(ty.clone(), variant_exprs.iter().cloned());
|
||||||
if non_default_type_params_len == 0 {
|
Some(variant_exprs)
|
||||||
// Fulfilled only if there are no generic parameters
|
|
||||||
lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(
|
|
||||||
Adt::Struct(*it),
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
lookup.insert(struct_ty.clone(), struct_exprs.iter().cloned());
|
|
||||||
|
|
||||||
Some((struct_ty, struct_exprs))
|
|
||||||
})
|
})
|
||||||
|
.flatten()
|
||||||
.collect();
|
.collect();
|
||||||
Some(exprs)
|
|
||||||
|
Some((ty, exprs))
|
||||||
}
|
}
|
||||||
_ => None,
|
Adt::Union(_) => None,
|
||||||
})
|
})
|
||||||
.flatten()
|
|
||||||
.filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
|
.filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
|
||||||
.flatten()
|
.flatten()
|
||||||
}
|
}
|
||||||
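The constructor tactics above combine per-field candidate expressions with `Itertools::multi_cartesian_product`, and the comments note the special case for zero-parameter constructors. A small, hypothetical illustration of why that case is handled separately, using the itertools crate this module already imports:

use itertools::Itertools;

// Build every argument combination; a nullary constructor still needs one
// (empty) combination, which `multi_cartesian_product` does not produce.
fn combos(per_param: Vec<Vec<&'static str>>) -> Vec<Vec<&'static str>> {
    if per_param.is_empty() {
        vec![Vec::new()]
    } else {
        per_param.into_iter().multi_cartesian_product().collect()
    }
}

fn main() {
    assert_eq!(combos(vec![]), vec![Vec::<&str>::new()]);
    assert_eq!(
        combos(vec![vec!["a", "b"], vec!["x"]]),
        vec![vec!["a", "x"], vec!["b", "x"]],
    );
}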
|
@ -515,7 +403,6 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
|
||||||
.collect()
|
.collect()
|
||||||
};
|
};
|
||||||
|
|
||||||
lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it)));
|
|
||||||
lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
|
lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
|
||||||
Some((ret_ty, fn_exprs))
|
Some((ret_ty, fn_exprs))
|
||||||
})
|
})
|
||||||
|
@ -555,6 +442,8 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
|
||||||
lookup
|
lookup
|
||||||
.new_types(NewTypesKey::ImplMethod)
|
.new_types(NewTypesKey::ImplMethod)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
|
.filter(|ty| !ty.type_arguments().any(|it| it.contains_unknown()))
|
||||||
|
.filter(|_| should_continue())
|
||||||
.flat_map(|ty| {
|
.flat_map(|ty| {
|
||||||
Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
|
Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
|
||||||
})
|
})
|
||||||
|
@ -563,26 +452,15 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
|
||||||
AssocItem::Function(f) => Some((imp, ty, f)),
|
AssocItem::Function(f) => Some((imp, ty, f)),
|
||||||
_ => None,
|
_ => None,
|
||||||
})
|
})
|
||||||
|
.filter(|_| should_continue())
|
||||||
.filter_map(move |(imp, ty, it)| {
|
.filter_map(move |(imp, ty, it)| {
|
||||||
let fn_generics = GenericDef::from(it);
|
let fn_generics = GenericDef::from(it);
|
||||||
let imp_generics = GenericDef::from(imp);
|
let imp_generics = GenericDef::from(imp);
|
||||||
|
|
||||||
// Ignore const params for now
|
|
||||||
let imp_type_params = imp_generics
|
|
||||||
.type_or_const_params(db)
|
|
||||||
.into_iter()
|
|
||||||
.map(|it| it.as_type_param(db))
|
|
||||||
.collect::<Option<Vec<TypeParam>>>()?;
|
|
||||||
|
|
||||||
// Ignore const params for now
|
|
||||||
let fn_type_params = fn_generics
|
|
||||||
.type_or_const_params(db)
|
|
||||||
.into_iter()
|
|
||||||
.map(|it| it.as_type_param(db))
|
|
||||||
.collect::<Option<Vec<TypeParam>>>()?;
|
|
||||||
|
|
||||||
// Ignore all functions that have something to do with lifetimes as we don't check them
|
// Ignore all functions that have something to do with lifetimes as we don't check them
|
||||||
if !fn_generics.lifetime_params(db).is_empty() {
|
if !fn_generics.lifetime_params(db).is_empty()
|
||||||
|
|| !imp_generics.lifetime_params(db).is_empty()
|
||||||
|
{
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -596,112 +474,59 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Only account for stable type parameters for now, unstable params can be default
|
// Ignore functions with generics for now as they kill the performance
|
||||||
// tho, for example in `Box<T, #[unstable] A: Allocator>`
|
// Also checking bounds for generics is problematic
|
||||||
if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
|
if !fn_generics.type_or_const_params(db).is_empty() {
|
||||||
|| fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let ret_ty = it.ret_type_with_args(db, ty.type_arguments());
|
||||||
|
// Filter out functions that return references
|
||||||
|
if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) || ret_ty.is_raw_ptr()
|
||||||
{
|
{
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Double check that we have fully known type
|
// Ignore functions that do not change the type
|
||||||
if ty.type_arguments().any(|it| it.contains_unknown()) {
|
if ty.could_unify_with_deeply(db, &ret_ty) {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
let non_default_fn_type_params_len =
|
let self_ty =
|
||||||
fn_type_params.iter().filter(|it| it.default(db).is_none()).count();
|
it.self_param(db).expect("No self param").ty_with_args(db, ty.type_arguments());
|
||||||
|
|
||||||
// Ignore functions with generics for now as they kill the performance
|
// Ignore functions that have different self type
|
||||||
// Also checking bounds for generics is problematic
|
if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) {
|
||||||
if non_default_fn_type_params_len > 0 {
|
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
let generic_params = lookup
|
let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup");
|
||||||
.iter_types()
|
|
||||||
.collect::<Vec<_>>() // Force take ownership
|
// Early exit if some param cannot be filled from lookup
|
||||||
|
let param_exprs: Vec<Vec<Expr>> = it
|
||||||
|
.params_without_self_with_args(db, ty.type_arguments())
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.permutations(non_default_fn_type_params_len);
|
.map(|field| lookup.find_autoref(db, field.ty()))
|
||||||
|
.collect::<Option<_>>()?;
|
||||||
|
|
||||||
let exprs: Vec<_> = generic_params
|
let generics: Vec<_> = ty.type_arguments().collect();
|
||||||
.filter(|_| should_continue())
|
let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs)
|
||||||
.filter_map(|generics| {
|
.chain(param_exprs)
|
||||||
// Insert default type params
|
.multi_cartesian_product()
|
||||||
let mut g = generics.into_iter();
|
.map(|params| {
|
||||||
let generics: Vec<_> = ty
|
let mut params = params.into_iter();
|
||||||
.type_arguments()
|
let target = Box::new(params.next().unwrap());
|
||||||
.map(Some)
|
Expr::Method {
|
||||||
.chain(fn_type_params.iter().map(|it| match it.default(db) {
|
func: it,
|
||||||
Some(ty) => Some(ty),
|
generics: generics.clone(),
|
||||||
None => {
|
target,
|
||||||
let generic = g.next().expect("Missing type param");
|
params: params.collect(),
|
||||||
// Filter out generics that do not unify due to trait bounds
|
|
||||||
it.ty(db).could_unify_with(db, &generic).then_some(generic)
|
|
||||||
}
|
|
||||||
}))
|
|
||||||
.collect::<Option<_>>()?;
|
|
||||||
|
|
||||||
let ret_ty = it.ret_type_with_args(
|
|
||||||
db,
|
|
||||||
ty.type_arguments().chain(generics.iter().cloned()),
|
|
||||||
);
|
|
||||||
// Filter out functions that return references
|
|
||||||
if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
|
|
||||||
|| ret_ty.is_raw_ptr()
|
|
||||||
{
|
|
||||||
return None;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ignore functions that do not change the type
|
|
||||||
if ty.could_unify_with_deeply(db, &ret_ty) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
let self_ty = it
|
|
||||||
.self_param(db)
|
|
||||||
.expect("No self param")
|
|
||||||
.ty_with_args(db, ty.type_arguments().chain(generics.iter().cloned()));
|
|
||||||
|
|
||||||
// Ignore functions that have different self type
|
|
||||||
if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup");
|
|
||||||
|
|
||||||
// Early exit if some param cannot be filled from lookup
|
|
||||||
let param_exprs: Vec<Vec<Expr>> = it
|
|
||||||
.params_without_self_with_args(
|
|
||||||
db,
|
|
||||||
ty.type_arguments().chain(generics.iter().cloned()),
|
|
||||||
)
|
|
||||||
.into_iter()
|
|
||||||
.map(|field| lookup.find_autoref(db, field.ty()))
|
|
||||||
.collect::<Option<_>>()?;
|
|
||||||
|
|
||||||
let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs)
|
|
||||||
.chain(param_exprs)
|
|
||||||
.multi_cartesian_product()
|
|
||||||
.map(|params| {
|
|
||||||
let mut params = params.into_iter();
|
|
||||||
let target = Box::new(params.next().unwrap());
|
|
||||||
Expr::Method {
|
|
||||||
func: it,
|
|
||||||
generics: generics.clone(),
|
|
||||||
target,
|
|
||||||
params: params.collect(),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
|
|
||||||
Some((ret_ty, fn_exprs))
|
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
Some(exprs)
|
|
||||||
|
Some((ret_ty, fn_exprs))
|
||||||
})
|
})
|
||||||
.flatten()
|
|
||||||
.filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
|
.filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
|
||||||
.flatten()
|
.flatten()
|
||||||
}
|
}
|
||||||
|
@@ -773,9 +598,8 @@ pub(super) fn famous_types<'a, DB: HirDatabase>(
  Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" },
  ]
  .into_iter()
- .map(|exprs| {
+ .inspect(|exprs| {
  lookup.insert(exprs.ty(db), std::iter::once(exprs.clone()));
- exprs
  })
  .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal))
  }
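The `famous_types` tactic now uses `inspect` instead of `map`, since the closure only caches each expression in the lookup table and passes it through unchanged. The same std-iterator pattern in isolation:

fn main() {
    let mut cache = Vec::new();
    let kept: Vec<i32> = [1, 2, 3]
        .into_iter()
        .inspect(|n| cache.push(*n)) // side effect only, item flows through
        .filter(|n| n % 2 == 1)
        .collect();
    assert_eq!(cache, [1, 2, 3]);
    assert_eq!(kept, [1, 3]);
}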
|
@ -805,6 +629,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
|
||||||
.clone()
|
.clone()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain(iter::once(ctx.goal.clone()))
|
.chain(iter::once(ctx.goal.clone()))
|
||||||
|
.filter(|ty| !ty.type_arguments().any(|it| it.contains_unknown()))
|
||||||
.filter(|_| should_continue())
|
.filter(|_| should_continue())
|
||||||
.flat_map(|ty| {
|
.flat_map(|ty| {
|
||||||
Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
|
Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
|
||||||
|
@ -815,24 +640,11 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
|
||||||
AssocItem::Function(f) => Some((imp, ty, f)),
|
@@ -850,104 +662,43 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
                 AssocItem::Function(f) => Some((imp, ty, f)),
                 _ => None,
             })
             .filter(|_| should_continue())
             .filter_map(move |(imp, ty, it)| {
                 let fn_generics = GenericDef::from(it);
                 let imp_generics = GenericDef::from(imp);

-                // Ignore const params for now
-                let imp_type_params = imp_generics
-                    .type_or_const_params(db)
-                    .into_iter()
-                    .map(|it| it.as_type_param(db))
-                    .collect::<Option<Vec<TypeParam>>>()?;
-
-                // Ignore const params for now
-                let fn_type_params = fn_generics
-                    .type_or_const_params(db)
-                    .into_iter()
-                    .map(|it| it.as_type_param(db))
-                    .collect::<Option<Vec<TypeParam>>>()?;
-
                 // Ignore all functions that have something to do with lifetimes as we don't check them
                 if !fn_generics.lifetime_params(db).is_empty()
                     || !imp_generics.lifetime_params(db).is_empty()
                 {
                     return None;
                 }

-                // Only account for stable type parameters for now, unstable params can be default
-                // tho, for example in `Box<T, #[unstable] A: Allocator>`
-                if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
-                    || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
-                {
-                    return None;
-                }
-
-                // Double check that we have fully known type
-                if ty.type_arguments().any(|it| it.contains_unknown()) {
-                    return None;
-                }
-
-                let non_default_fn_type_params_len =
-                    fn_type_params.iter().filter(|it| it.default(db).is_none()).count();
-
-                // Ignore functions with generics for now as they kill the performance
-                // Also checking bounds for generics is problematic
-                if non_default_fn_type_params_len > 0 {
-                    return None;
-                }
-
-                let generic_params = lookup
-                    .iter_types()
-                    .collect::<Vec<_>>() // Force take ownership
-                    .into_iter()
-                    .permutations(non_default_fn_type_params_len);
-
-                let exprs: Vec<_> = generic_params
-                    .filter(|_| should_continue())
-                    .filter_map(|generics| {
-                        // Insert default type params
-                        let mut g = generics.into_iter();
-                        let generics: Vec<_> = ty
-                            .type_arguments()
-                            .map(Some)
-                            .chain(fn_type_params.iter().map(|it| match it.default(db) {
-                                Some(ty) => Some(ty),
-                                None => {
-                                    let generic = g.next().expect("Missing type param");
-                                    it.trait_bounds(db)
-                                        .into_iter()
-                                        .all(|bound| generic.impls_trait(db, bound, &[]));
-                                    // Filter out generics that do not unify due to trait bounds
-                                    it.ty(db).could_unify_with(db, &generic).then_some(generic)
-                                }
-                            }))
-                            .collect::<Option<_>>()?;
-
-                        let ret_ty = it.ret_type_with_args(
-                            db,
-                            ty.type_arguments().chain(generics.iter().cloned()),
-                        );
-                        // Filter out functions that return references
-                        if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
-                            || ret_ty.is_raw_ptr()
-                        {
-                            return None;
-                        }
-
-                        // Ignore functions that do not change the type
-                        // if ty.could_unify_with_deeply(db, &ret_ty) {
-                        //     return None;
-                        // }
-
-                        // Early exit if some param cannot be filled from lookup
-                        let param_exprs: Vec<Vec<Expr>> = it
-                            .params_without_self_with_args(
-                                db,
-                                ty.type_arguments().chain(generics.iter().cloned()),
-                            )
-                            .into_iter()
-                            .map(|field| lookup.find_autoref(db, field.ty()))
-                            .collect::<Option<_>>()?;
-
-                        // Note that we need special case for 0 param constructors because of multi cartesian
-                        // product
-                        let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
-                            vec![Expr::Function { func: it, generics, params: Vec::new() }]
-                        } else {
-                            param_exprs
-                                .into_iter()
-                                .multi_cartesian_product()
-                                .map(|params| Expr::Function {
-                                    func: it,
-                                    generics: generics.clone(),
-                                    params,
-                                })
-                                .collect()
-                        };
-
-                        lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
-                        Some((ret_ty, fn_exprs))
-                    })
-                    .collect();
-                Some(exprs)
-            })
-            .flatten()
+                // Ignore functions with generics for now as they kill the performance
+                // Also checking bounds for generics is problematic
+                if !fn_generics.type_or_const_params(db).is_empty() {
+                    return None;
+                }
+
+                let ret_ty = it.ret_type_with_args(db, ty.type_arguments());
+                // Filter out functions that return references
+                if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) || ret_ty.is_raw_ptr()
+                {
+                    return None;
+                }
+
+                // Early exit if some param cannot be filled from lookup
+                let param_exprs: Vec<Vec<Expr>> = it
+                    .params_without_self_with_args(db, ty.type_arguments())
+                    .into_iter()
+                    .map(|field| lookup.find_autoref(db, field.ty()))
+                    .collect::<Option<_>>()?;
+
+                // Note that we need special case for 0 param constructors because of multi cartesian
+                // product
+                let generics = ty.type_arguments().collect();
+                let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
+                    vec![Expr::Function { func: it, generics, params: Vec::new() }]
+                } else {
+                    param_exprs
+                        .into_iter()
+                        .multi_cartesian_product()
+                        .map(|params| Expr::Function { func: it, generics: generics.clone(), params })
+                        .collect()
+                };
+
+                lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
+                Some((ret_ty, fn_exprs))
+            })
             .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
             .flatten()
     }
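The zero-parameter special case in both the removed and the added code comes from how `Itertools::multi_cartesian_product` behaves on an empty input: it yields no combinations at all, so without the `param_exprs.is_empty()` branch no call expression would ever be produced for nullary functions. A minimal standalone sketch of that behaviour (not rust-analyzer code, just the itertools semantics the comment above refers to):

```rust
use itertools::Itertools; // itertools is already used by the code in this hunk

fn main() {
    // No parameters: the multi cartesian product yields nothing, which is why the
    // tactic above emits a single zero-argument call explicitly for this case.
    let no_params: Vec<Vec<&str>> = Vec::new();
    assert_eq!(no_params.into_iter().multi_cartesian_product().count(), 0);

    // Two parameters with candidate argument expressions ["a", "b"] and ["x"]:
    // one candidate call is produced per combination of arguments.
    let params = vec![vec!["a", "b"], vec!["x"]];
    let combos: Vec<Vec<&str>> = params.into_iter().multi_cartesian_product().collect();
    assert_eq!(combos, vec![vec!["a", "x"], vec!["b", "x"]]);
}
```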
@@ -15,6 +15,8 @@ pub struct AssistConfig {
     pub insert_use: InsertUseConfig,
     pub prefer_no_std: bool,
     pub prefer_prelude: bool,
+    pub prefer_absolute: bool,
     pub assist_emit_must_use: bool,
     pub term_search_fuel: u64,
+    pub term_search_borrowck: bool,
 }
@@ -74,6 +74,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
     let cfg = ImportPathConfig {
         prefer_no_std: ctx.config.prefer_no_std,
         prefer_prelude: ctx.config.prefer_prelude,
+        prefer_absolute: ctx.config.prefer_absolute,
     };

     let module = ctx.sema.scope(expr.syntax())?.module();
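A new `prefer_absolute` field is threaded through every `ImportPathConfig` construction in this diff. Judging by the name alone (an assumption here; the option itself is defined elsewhere in the change), it asks path rendering to prefer fully qualified, `::`-prefixed paths. A hypothetical illustration of the two spellings such a flag chooses between:

```rust
// Both modules compile; the only difference is how the imported path is spelled.
mod relative {
    use std::collections::HashMap; // plain path, relative to the extern prelude
    pub type Map = HashMap<String, u32>;
}

mod absolute {
    use ::std::collections::HashMap; // absolute, `::`-prefixed path
    pub type Map = HashMap<String, u32>;
}

fn main() {
    let _a: relative::Map = Default::default();
    let _b: absolute::Map = Default::default();
}
```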
@@ -1,7 +1,7 @@
 use either::Either;
 use ide_db::defs::{Definition, NameRefClass};
 use syntax::{
-    ast::{self, make, HasArgList},
+    ast::{self, make, HasArgList, HasGenericArgs},
     ted, AstNode,
 };
@@ -93,6 +93,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
     let cfg = ImportPathConfig {
         prefer_no_std: ctx.config.prefer_no_std,
         prefer_prelude: ctx.config.prefer_prelude,
+        prefer_absolute: ctx.config.prefer_absolute,
     };

     let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
@@ -1,3 +1,4 @@
+use either::Either;
 use hir::{ImportPathConfig, ModuleDef};
 use ide_db::{
     assists::{AssistId, AssistKind},
@@ -76,7 +77,11 @@ pub(crate) fn bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option

             let usages = definition.usages(&ctx.sema).all();
             add_enum_def(edit, ctx, &usages, target_node, &target_module);
-            replace_usages(edit, ctx, usages, definition, &target_module);
+            let mut delayed_mutations = Vec::new();
+            replace_usages(edit, ctx, usages, definition, &target_module, &mut delayed_mutations);
+            for (scope, path) in delayed_mutations {
+                insert_use(&scope, path, &ctx.config.insert_use);
+            }
         },
     )
 }
@@ -91,29 +96,32 @@ struct BoolNodeData

 /// Attempts to find an appropriate node to apply the action to.
 fn find_bool_node(ctx: &AssistContext<'_>) -> Option<BoolNodeData> {
-    let name: ast::Name = ctx.find_node_at_offset()?;
+    let name = ctx.find_node_at_offset::<ast::Name>()?;

-    if let Some(let_stmt) = name.syntax().ancestors().find_map(ast::LetStmt::cast) {
-        let bind_pat = match let_stmt.pat()? {
-            ast::Pat::IdentPat(pat) => pat,
-            _ => {
-                cov_mark::hit!(not_applicable_in_non_ident_pat);
-                return None;
-            }
-        };
-        let def = ctx.sema.to_def(&bind_pat)?;
+    if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) {
+        let def = ctx.sema.to_def(&ident_pat)?;
         if !def.ty(ctx.db()).is_bool() {
             cov_mark::hit!(not_applicable_non_bool_local);
             return None;
         }

-        Some(BoolNodeData {
-            target_node: let_stmt.syntax().clone(),
-            name,
-            ty_annotation: let_stmt.ty(),
-            initializer: let_stmt.initializer(),
-            definition: Definition::Local(def),
-        })
+        let local_definition = Definition::Local(def);
+        match ident_pat.syntax().parent().and_then(Either::<ast::Param, ast::LetStmt>::cast)? {
+            Either::Left(param) => Some(BoolNodeData {
+                target_node: param.syntax().clone(),
+                name,
+                ty_annotation: param.ty(),
+                initializer: None,
+                definition: local_definition,
+            }),
+            Either::Right(let_stmt) => Some(BoolNodeData {
+                target_node: let_stmt.syntax().clone(),
+                name,
+                ty_annotation: let_stmt.ty(),
+                initializer: let_stmt.initializer(),
+                definition: local_definition,
+            }),
+        }
     } else if let Some(const_) = name.syntax().parent().and_then(ast::Const::cast) {
         let def = ctx.sema.to_def(&const_)?;
         if !def.ty(ctx.db()).is_bool() {
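The rewritten `find_bool_node` above classifies the parent of the `IdentPat` as either a function parameter or a `let` statement and builds the same `BoolNodeData` in both arms. A toy sketch of that `Either`-based dispatch, with stand-in types rather than the real AST API:

```rust
use either::Either;

// Stand-ins for `ast::Param` and `ast::LetStmt`.
struct Param;
struct LetStmt;

// Stand-in for `Either::<ast::Param, ast::LetStmt>::cast(node)` used in the diff.
fn cast(parent_kind: &str) -> Option<Either<Param, LetStmt>> {
    match parent_kind {
        "param" => Some(Either::Left(Param)),
        "let" => Some(Either::Right(LetStmt)),
        _ => None, // any other parent: the assist does not apply
    }
}

fn describe(parent_kind: &str) -> Option<&'static str> {
    Some(match cast(parent_kind)? {
        Either::Left(_) => "rewrite the parameter's type annotation",
        Either::Right(_) => "rewrite the let binding, including its initializer",
    })
}

fn main() {
    assert_eq!(describe("param"), Some("rewrite the parameter's type annotation"));
    assert_eq!(describe("struct-field"), None);
}
```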
@@ -197,6 +205,7 @@ fn replace_usages(
     usages: UsageSearchResult,
     target_definition: Definition,
     target_module: &hir::Module,
+    delayed_mutations: &mut Vec<(ImportScope, ast::Path)>,
 ) {
     for (file_id, references) in usages {
         edit.edit_file(file_id);
@@ -217,6 +226,7 @@ fn replace_usages(
                         def.usages(&ctx.sema).all(),
                         target_definition,
                         target_module,
+                        delayed_mutations,
                     )
                 }
             } else if let Some(initializer) = find_assignment_usage(&name) {
@@ -255,6 +265,7 @@ fn replace_usages(
                         def.usages(&ctx.sema).all(),
                         target_definition,
                         target_module,
+                        delayed_mutations,
                     )
                 }
             }
@@ -306,7 +317,7 @@ fn replace_usages(
                     ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
                     ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
                 };
-                insert_use(&scope, path, &ctx.config.insert_use);
+                delayed_mutations.push((scope, path));
             }
         },
     )
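The `delayed_mutations` changes above follow a common two-pass pattern: while `replace_usages` walks and edits the usage sites it only records which `use` items to add, and `bool_to_enum` performs the `insert_use` calls after the traversal has finished, so the insertions cannot interfere with the nodes still being rewritten. A minimal standalone sketch of the pattern, using plain strings instead of syntax trees:

```rust
fn main() {
    let usages = ["crate_a/lib.rs", "crate_a/tests.rs"];
    let mut delayed_mutations: Vec<(&str, &str)> = Vec::new();

    // Pass 1: decide what to insert, but do not mutate anything yet.
    for file in usages {
        delayed_mutations.push((file, "use crate::Bool;"));
    }

    // Pass 2: apply the recorded mutations once the walk is over.
    for (file, import) in delayed_mutations {
        println!("insert `{import}` at the top of {file}");
    }
}
```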
@@ -329,6 +340,7 @@ fn augment_references_with_imports(
     let cfg = ImportPathConfig {
         prefer_no_std: ctx.config.prefer_no_std,
         prefer_prelude: ctx.config.prefer_prelude,
+        prefer_absolute: ctx.config.prefer_absolute,
     };

     references
@@ -449,7 +461,20 @@ fn add_enum_def(
     usages: &UsageSearchResult,
     target_node: SyntaxNode,
     target_module: &hir::Module,
-) {
+) -> Option<()> {
+    let insert_before = node_to_insert_before(target_node);
+
+    if ctx
+        .sema
+        .scope(&insert_before)?
+        .module()
+        .scope(ctx.db(), Some(*target_module))
+        .iter()
+        .any(|(name, _)| name.as_str() == Some("Bool"))
+    {
+        return None;
+    }
+
     let make_enum_pub = usages
         .iter()
         .flat_map(|(_, refs)| refs)
@@ -460,7 +485,6 @@ fn add_enum_def(
         .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);
     let enum_def = make_bool_enum(make_enum_pub);

-    let insert_before = node_to_insert_before(target_node);
     let indent = IndentLevel::from_node(&insert_before);
     enum_def.reindent_to(indent);
@@ -468,6 +492,8 @@
         insert_before.text_range().start(),
         format!("{}\n\n{indent}", enum_def.syntax().text()),
     );
+
+    Some(())
 }

 /// Finds where to put the new enum definition.
@@ -517,6 +543,125 @@ mod tests {

     use crate::tests::{check_assist, check_assist_not_applicable};

+    #[test]
+    fn parameter_with_first_param_usage() {
+        check_assist(
+            bool_to_enum,
+            r#"
+fn function($0foo: bool, bar: bool) {
+    if foo {
+        println!("foo");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn function(foo: Bool, bar: bool) {
+    if foo == Bool::True {
+        println!("foo");
+    }
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn no_duplicate_enums() {
+        check_assist(
+            bool_to_enum,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn function(foo: bool, $0bar: bool) {
+    if bar {
+        println!("bar");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn function(foo: bool, bar: Bool) {
+    if bar == Bool::True {
+        println!("bar");
+    }
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn parameter_with_last_param_usage() {
+        check_assist(
+            bool_to_enum,
+            r#"
+fn function(foo: bool, $0bar: bool) {
+    if bar {
+        println!("bar");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn function(foo: bool, bar: Bool) {
+    if bar == Bool::True {
+        println!("bar");
+    }
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn parameter_with_middle_param_usage() {
+        check_assist(
+            bool_to_enum,
+            r#"
+fn function(foo: bool, $0bar: bool, baz: bool) {
+    if bar {
+        println!("bar");
+    }
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn function(foo: bool, bar: Bool, baz: bool) {
+    if bar == Bool::True {
+        println!("bar");
+    }
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn parameter_with_closure_usage() {
+        check_assist(
+            bool_to_enum,
+            r#"
+fn main() {
+    let foo = |$0bar: bool| bar;
+}
+"#,
+            r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+    let foo = |bar: Bool| bar == Bool::True;
+}
+"#,
+        )
+    }
+
     #[test]
     fn local_variable_with_usage() {
         check_assist(
@@ -784,7 +929,6 @@ fn main() {

     #[test]
     fn local_variable_non_ident_pat() {
-        cov_mark::check!(not_applicable_in_non_ident_pat);
         check_assist_not_applicable(
             bool_to_enum,
             r#"
@@ -1,7 +1,7 @@
 use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait};
 use itertools::Itertools;
 use syntax::{
-    ast::{self, make, AstNode, HasName},
+    ast::{self, make, AstNode, HasGenericArgs, HasName},
     ted,
 };
@@ -1,6 +1,6 @@
 use hir::ImportPathConfig;
 use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait};
-use syntax::ast::{self, AstNode, HasName};
+use syntax::ast::{self, AstNode, HasGenericArgs, HasName};

 use crate::{AssistContext, AssistId, AssistKind, Assists};

@@ -47,6 +47,7 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
     let cfg = ImportPathConfig {
         prefer_no_std: ctx.config.prefer_no_std,
         prefer_prelude: ctx.config.prefer_prelude,
+        prefer_absolute: ctx.config.prefer_absolute,
     };

     let src_type_path = {
@@ -186,6 +186,7 @@ fn augment_references_with_imports(
     let cfg = ImportPathConfig {
         prefer_no_std: ctx.config.prefer_no_std,
         prefer_prelude: ctx.config.prefer_prelude,
+        prefer_absolute: ctx.config.prefer_absolute,
     };

     references
@@ -90,6 +90,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
     let cfg = ImportPathConfig {
         prefer_no_std: ctx.config.prefer_no_std,
         prefer_prelude: ctx.config.prefer_prelude,
+        prefer_absolute: ctx.config.prefer_absolute,
     };

     let module = ctx.sema.scope(ident_pat.syntax())?.module();
@@ -216,6 +216,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
         ImportPathConfig {
             prefer_no_std: ctx.config.prefer_no_std,
             prefer_prelude: ctx.config.prefer_prelude,
+            prefer_absolute: ctx.config.prefer_absolute,
         },
     );

@@ -393,6 +393,7 @@ fn process_references(
         ImportPathConfig {
             prefer_no_std: ctx.config.prefer_no_std,
             prefer_prelude: ctx.config.prefer_prelude,
+            prefer_absolute: ctx.config.prefer_absolute,
         },
     );
     if let Some(mut mod_path) = mod_path {
@@ -1,7 +1,7 @@
 use either::Either;
 use ide_db::syntax_helpers::node_ext::walk_ty;
 use syntax::{
-    ast::{self, edit::IndentLevel, make, AstNode, HasGenericParams, HasName},
+    ast::{self, edit::IndentLevel, make, AstNode, HasGenericArgs, HasGenericParams, HasName},
     ted,
 };
@@ -17,8 +17,9 @@ use syntax::{
         self,
         edit::{self, AstNodeEdit},
         edit_in_place::AttrsOwnerEdit,
-        make, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericParams, HasName,
-        HasTypeBounds, HasVisibility as astHasVisibility, Path, WherePred,
+        make, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericArgs,
+        HasGenericParams, HasName, HasTypeBounds, HasVisibility as astHasVisibility, Path,
+        WherePred,
     },
     ted::{self, Position},
     AstNode, NodeOrToken, SmolStr, SyntaxKind,
@@ -64,6 +64,7 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
         ImportPathConfig {
             prefer_no_std: ctx.config.prefer_no_std,
             prefer_prelude: ctx.config.prefer_prelude,
+            prefer_absolute: ctx.config.prefer_absolute,
         },
     )?;

@@ -111,6 +112,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
         ImportPathConfig {
             prefer_no_std: ctx.config.prefer_no_std,
             prefer_prelude: ctx.config.prefer_prelude,
+            prefer_absolute: ctx.config.prefer_absolute,
         },
     )?;

@@ -4,7 +4,7 @@ use itertools::Itertools;
 use stdx::{format_to, to_lower_snake_case};
 use syntax::{
     algo::skip_whitespace_token,
-    ast::{self, edit::IndentLevel, HasDocComments, HasName},
+    ast::{self, edit::IndentLevel, HasDocComments, HasGenericArgs, HasName},
     match_ast, AstNode, AstToken,
 };
@@ -65,6 +65,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
         ImportPathConfig {
             prefer_no_std: ctx.config.prefer_no_std,
             prefer_prelude: ctx.config.prefer_prelude,
+            prefer_absolute: ctx.config.prefer_absolute,
         },
     )?;

@@ -15,7 +15,9 @@ use ide_db::{
 };
 use itertools::{izip, Itertools};
 use syntax::{
-    ast::{self, edit::IndentLevel, edit_in_place::Indent, HasArgList, Pat, PathExpr},
+    ast::{
+        self, edit::IndentLevel, edit_in_place::Indent, HasArgList, HasGenericArgs, Pat, PathExpr,
+    },
     ted, AstNode, NodeOrToken, SyntaxKind,
 };
@@ -489,6 +489,25 @@ use foo::bar;
         );
     }

+    #[test]
+    fn test_merge_nested_empty_and_self_with_other() {
+        check_assist(
+            merge_imports,
+            r"
+use foo::$0{bar};
+use foo::{bar::{self, other}};
+",
+            r"
+use foo::bar::{self, other};
+",
+        );
+        check_assist_import_one_variations!(
+            "foo::$0{bar}",
+            "foo::{bar::{self, other}}",
+            "use {foo::bar::{self, other}};"
+        );
+    }
+
     #[test]
     fn test_merge_nested_list_self_and_glob() {
         check_assist(
@@ -119,10 +119,39 @@ mod tests {
         );
     }

+    #[test]
+    fn test_braces_kept() {
+        check_assist_not_applicable_variations!("foo::bar::{$0self}");
+
+        // This code compiles but transforming "bar::{self}" into "bar" causes a
+        // compilation error (the name `bar` is defined multiple times).
+        // Therefore, the normalize_input assist must not apply here.
+        check_assist_not_applicable(
+            normalize_import,
+            r"
+mod foo {
+    pub mod bar {}
+
+    pub const bar: i32 = 8;
+}
+
+use foo::bar::{$0self};
+
+const bar: u32 = 99;
+
+fn main() {
+    let local_bar = bar;
+}
+
+",
+        );
+    }
+
     #[test]
     fn test_redundant_braces() {
         check_assist_variations!("foo::{bar::{baz, Qux}}", "foo::bar::{baz, Qux}");
-        check_assist_variations!("foo::{bar::{self}}", "foo::bar");
+        check_assist_variations!("foo::{bar::{self}}", "foo::bar::{self}");
         check_assist_variations!("foo::{bar::{*}}", "foo::bar::*");
         check_assist_variations!("foo::{bar::{Qux as Quux}}", "foo::bar::Qux as Quux");
         check_assist_variations!(
@@ -53,6 +53,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) ->
         ImportPathConfig {
             prefer_no_std: ctx.config.prefer_no_std,
             prefer_prelude: ctx.config.prefer_prelude,
+            prefer_absolute: ctx.config.prefer_absolute,
         },
     )?;

@@ -6,6 +6,7 @@ use ide_db::{
     helpers::mod_path_to_ast,
     imports::import_assets::{ImportCandidate, LocatedImport},
 };
+use syntax::ast::HasGenericArgs;
 use syntax::{
     ast,
     ast::{make, HasArgList},
@@ -40,6 +41,7 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
     let cfg = ImportPathConfig {
         prefer_no_std: ctx.config.prefer_no_std,
         prefer_prelude: ctx.config.prefer_prelude,
+        prefer_absolute: ctx.config.prefer_absolute,
     };

     let mut proposed_imports: Vec<_> =
@@ -239,4 +239,33 @@ mod tests {

         check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(return 2) + 2 }"#);
     }
+
+    #[test]
+    fn remove_parens_indirect_calls() {
+        check_assist(
+            remove_parentheses,
+            r#"fn f(call: fn(usize), arg: usize) { $0(call)(arg); }"#,
+            r#"fn f(call: fn(usize), arg: usize) { call(arg); }"#,
+        );
+        check_assist(
+            remove_parentheses,
+            r#"fn f<F>(call: F, arg: usize) where F: Fn(usize) { $0(call)(arg); }"#,
+            r#"fn f<F>(call: F, arg: usize) where F: Fn(usize) { call(arg); }"#,
+        );
+
+        // Parentheses are necessary when calling a function-like pointer that is a member of a struct or union.
+        check_assist_not_applicable(
+            remove_parentheses,
+            r#"
+struct Foo<T> {
+    t: T,
+}
+
+impl Foo<fn(usize)> {
+    fn foo(&self, arg: usize) {
+        $0(self.t)(arg);
+    }
+}"#,
+        );
+    }
 }
@@ -89,6 +89,7 @@ pub(crate) fn replace_derive_with_manual_impl(
             ImportPathConfig {
                 prefer_no_std: ctx.config.prefer_no_std,
                 prefer_prelude: ctx.config.prefer_prelude,
+                prefer_absolute: ctx.config.prefer_absolute,
             },
         )
         .as_ref()
@@ -4,7 +4,7 @@ use ide_db::{
     imports::insert_use::{insert_use, ImportScope},
 };
 use syntax::{
-    ast::{self, make},
+    ast::{self, make, HasGenericArgs},
     match_ast, ted, AstNode, SyntaxNode,
 };

@@ -70,6 +70,7 @@ pub(crate) fn replace_qualified_name_with_use(
             ImportPathConfig {
                 prefer_no_std: ctx.config.prefer_no_std,
                 prefer_prelude: ctx.config.prefer_prelude,
+                prefer_absolute: ctx.config.prefer_absolute,
             },
         )
     })
@@ -1,7 +1,6 @@
 use hir::HirDisplay;
 use syntax::{
-    ast::{Expr, GenericArg, GenericArgList},
-    ast::{LetStmt, Type::InferType},
+    ast::{Expr, GenericArg, GenericArgList, HasGenericArgs, LetStmt, Type::InferType},
     AstNode, TextRange,
 };
@@ -37,7 +37,11 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
         sema: &ctx.sema,
         scope: &scope,
         goal: target_ty,
-        config: TermSearchConfig { fuel: ctx.config.term_search_fuel, ..Default::default() },
+        config: TermSearchConfig {
+            fuel: ctx.config.term_search_fuel,
+            enable_borrowcheck: ctx.config.term_search_borrowck,
+            ..Default::default()
+        },
     };
     let paths = hir::term_search::term_search(&term_search_ctx);
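The assist now forwards two user-facing settings into `TermSearchConfig`, letting the remaining fields fall back to their defaults via struct-update syntax. A small sketch of that construction pattern, using a toy config type rather than the real `TermSearchConfig`:

```rust
#[derive(Debug)]
struct SearchConfig {
    fuel: u64,
    enable_borrowcheck: bool,
    max_depth: usize,
}

impl Default for SearchConfig {
    fn default() -> Self {
        SearchConfig { fuel: 1200, enable_borrowcheck: true, max_depth: 6 }
    }
}

fn main() {
    // Explicit fields come from user configuration; everything else keeps its default.
    let cfg = SearchConfig { fuel: 400, enable_borrowcheck: false, ..Default::default() };
    println!("{cfg:?}");
}
```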
@@ -56,6 +60,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
                 ImportPathConfig {
                     prefer_no_std: ctx.config.prefer_no_std,
                     prefer_prelude: ctx.config.prefer_prelude,
+                    prefer_absolute: ctx.config.prefer_absolute,
                 },
             )
             .ok()
@@ -144,7 +149,7 @@ fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#,
             term_search,
             r#"//- minicore: todo, unimplemented, option
 fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
-            r#"fn f() { let a: i32 = 1; let b: Option<i32> = None; }"#,
+            r#"fn f() { let a: i32 = 1; let b: Option<i32> = Some(a); }"#,
         )
     }
@@ -4,7 +4,7 @@ use ide_db::{
     famous_defs::FamousDefs,
 };
 use syntax::{
-    ast::{self, HasVisibility},
+    ast::{self, HasGenericArgs, HasVisibility},
     AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
 };

@@ -142,6 +142,7 @@ pub(crate) fn desugar_async_into_impl_future(
         ImportPathConfig {
             prefer_no_std: ctx.config.prefer_no_std,
             prefer_prelude: ctx.config.prefer_prelude,
+            prefer_absolute: ctx.config.prefer_absolute,
         },
     )?;
     let trait_path = trait_path.display(ctx.db());
Some files were not shown because too many files have changed in this diff.