Mirror of https://github.com/rust-lang/rust-analyzer
Synced 2024-12-27 05:23:24 +00:00

⬆️ rust-analyzer

This commit is contained in:
parent 134701885d
commit 31519bb394

83 changed files with 2092 additions and 626 deletions
.github/workflows/release.yaml (vendored) — 6 lines changed

@@ -31,13 +31,13 @@ jobs:
         - os: windows-latest
           target: aarch64-pc-windows-msvc
           code-target: win32-arm64
-        - os: ubuntu-18.04
+        - os: ubuntu-20.04
           target: x86_64-unknown-linux-gnu
           code-target: linux-x64
-        - os: ubuntu-18.04
+        - os: ubuntu-20.04
           target: aarch64-unknown-linux-gnu
           code-target: linux-arm64
-        - os: ubuntu-18.04
+        - os: ubuntu-20.04
           target: arm-unknown-linux-gnueabihf
           code-target: linux-armhf
         - os: macos-11
Cargo.lock (generated) — 167 lines changed, dependency updates:

 anyhow: 1.0.58 → 1.0.62
 backtrace: 0.3.65 → 0.3.66 (its "object 0.28.4" dependency becomes plain "object")
 camino: 1.0.9 → 1.1.1
 chalk-derive: 0.83.0 → 0.84.0
 chalk-ir: 0.83.0 → 0.84.0
 chalk-recursive: 0.83.0 → 0.84.0
 chalk-solve: 0.83.0 → 0.84.0
 crossbeam: 0.8.1 → 0.8.2
 crossbeam-channel: 0.5.5 → 0.5.6
 crossbeam-deque: 0.8.1 → 0.8.2
 crossbeam-epoch: 0.9.9 → 0.9.10
 crossbeam-queue: 0.3.5 → 0.3.6
 crossbeam-utils: 0.8.10 → 0.8.11
 either: 1.7.0 → 1.8.0
 gimli: 0.26.1 → 0.26.2
 hashbrown: 0.12.1 → 0.12.3
 itoa: 1.0.2 → 1.0.3
 libc: 0.2.126 → 0.2.132
 memmap2: 0.5.4 → 0.5.7
 notify: 5.0.0-pre.15 → 5.0.0-pre.16
 object 0.28.4: package entry removed; only object 0.29.0 remains, and the
   proc-macro-api and proc-macro-srv dependency lists change "object 0.29.0" to plain "object"
 once_cell: 1.13.0 → 1.13.1
 paste: 1.0.7 → 1.0.8
 proc-macro2: 1.0.40 → 1.0.43
 pulldown-cmark: 0.9.1 → 0.9.2
 quote: 1.0.20 → 1.0.21
 redox_syscall: 0.2.13 → 0.2.16
 regex: 1.5.6 → 1.6.0
 regex-syntax: 0.6.26 → 0.6.27
 ryu: 1.0.10 → 1.0.11
 semver: 1.0.12 → 1.0.13
 serde: 1.0.138 → 1.0.143
 serde_derive: 1.0.138 → 1.0.143
 serde_json: 1.0.82 → 1.0.83
 serde_repr: 0.1.8 → 0.1.9
 syn: 1.0.98 → 1.0.99
 tracing: 0.1.35 → 0.1.36
 tracing-core: 0.1.28 → 0.1.29
 tracing-subscriber: 0.3.14 → 0.3.15
@@ -77,8 +77,13 @@ impl FlycheckHandle {
     }
 
     /// Schedule a re-start of the cargo check worker.
-    pub fn update(&self) {
-        self.sender.send(Restart).unwrap();
+    pub fn restart(&self) {
+        self.sender.send(Restart::Yes).unwrap();
+    }
+
+    /// Stop this cargo check worker.
+    pub fn cancel(&self) {
+        self.sender.send(Restart::No).unwrap();
     }
 
     pub fn id(&self) -> usize {
@@ -122,7 +127,10 @@ pub enum Progress {
     DidCancel,
 }
 
-struct Restart;
+enum Restart {
+    Yes,
+    No,
+}
 
 struct FlycheckActor {
     id: usize,
@@ -149,6 +157,7 @@ impl FlycheckActor {
         config: FlycheckConfig,
         workspace_root: AbsPathBuf,
     ) -> FlycheckActor {
+        tracing::info!(%id, ?workspace_root, "Spawning flycheck");
         FlycheckActor { id, sender, config, workspace_root, cargo_handle: None }
     }
     fn progress(&self, progress: Progress) {
@@ -164,10 +173,13 @@ impl FlycheckActor {
     fn run(mut self, inbox: Receiver<Restart>) {
         while let Some(event) = self.next_event(&inbox) {
             match event {
-                Event::Restart(Restart) => {
+                Event::Restart(Restart::No) => {
+                    self.cancel_check_process();
+                }
+                Event::Restart(Restart::Yes) => {
                     // Cancel the previously spawned process
                     self.cancel_check_process();
-                    while let Ok(Restart) = inbox.recv_timeout(Duration::from_millis(50)) {}
+                    while let Ok(_) = inbox.recv_timeout(Duration::from_millis(50)) {}
 
                     let command = self.check_command();
                     tracing::debug!(?command, "will restart flycheck");
@@ -223,6 +235,10 @@ impl FlycheckActor {
 
     fn cancel_check_process(&mut self) {
         if let Some(cargo_handle) = self.cargo_handle.take() {
+            tracing::debug!(
+                command = ?self.check_command(),
+                "did cancel flycheck"
+            );
             cargo_handle.cancel();
             self.progress(Progress::DidCancel);
         }
@@ -345,7 +361,7 @@ impl CargoActor {
             //
             // Because cargo only outputs one JSON object per line, we can
             // simply skip a line if it doesn't parse, which just ignores any
-            // erroneus output.
+            // erroneous output.
 
             let mut error = String::new();
             let mut read_at_least_one_message = false;
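With the unit struct replaced by the Restart enum, the handle now exposes both a restart and a cancel entry point. A minimal caller-side sketch, assuming a `handle` variable of type FlycheckHandle is already constructed (hypothetical name, not part of this diff):

    // Sketch: driving the flycheck worker from client code.
    handle.restart(); // sends Restart::Yes – (re)spawn the cargo check worker
    handle.cancel();  // sends Restart::No – cancel the running worker without restarting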
@@ -4,6 +4,7 @@ mod lower;
 #[cfg(test)]
 mod tests;
 pub mod scope;
+mod pretty;
 
 use std::{ops::Index, sync::Arc};
 
@@ -352,6 +353,10 @@ impl Body {
         }
     }
 
+    pub fn pretty_print(&self, db: &dyn DefDatabase, owner: DefWithBodyId) -> String {
+        pretty::print_body_hir(db, self, owner)
+    }
+
     fn new(
         db: &dyn DefDatabase,
         expander: Expander,
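A rough usage sketch for the new accessor, assuming `db` (a DefDatabase) and `def` (a DefWithBodyId) are already in scope and that the body query is available on the database — hypothetical names used for illustration only:

    // Sketch: pretty-print the lowered HIR of one item body for debugging.
    let body = db.body(def);                      // assumed body query
    let hir_text: String = body.pretty_print(db, def);
    eprintln!("{}", hir_text);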
@@ -551,9 +551,17 @@ impl ExprCollector<'_> {
                 }
             }
             ast::Expr::MacroStmts(e) => {
-                let statements = e.statements().filter_map(|s| self.collect_stmt(s)).collect();
+                let statements: Box<[_]> =
+                    e.statements().filter_map(|s| self.collect_stmt(s)).collect();
                 let tail = e.expr().map(|e| self.collect_expr(e));
 
+                if e.syntax().children().next().is_none() {
+                    // HACK: make sure that macros that expand to nothing aren't treated as a `()`
+                    // expression when used in block tail position.
+                    cov_mark::hit!(empty_macro_in_trailing_position_is_removed);
+                    return None;
+                }
+
                 self.alloc_expr(Expr::MacroStmts { tail, statements }, syntax_ptr)
             }
             ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr),
crates/hir-def/src/body/pretty.rs (new file) — 621 lines

//! A pretty-printer for HIR.

use std::fmt::{self, Write};

use crate::{
    expr::{Array, BindingAnnotation, Literal, Statement},
    pretty::{print_generic_args, print_path, print_type_ref},
    type_ref::TypeRef,
};

use super::*;

pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBodyId) -> String {
    let needs_semi;
    let header = match owner {
        DefWithBodyId::FunctionId(it) => {
            needs_semi = false;
            let item_tree_id = it.lookup(db).id;
            format!("fn {}(…) ", item_tree_id.item_tree(db)[item_tree_id.value].name)
        }
        DefWithBodyId::StaticId(it) => {
            needs_semi = true;
            let item_tree_id = it.lookup(db).id;
            format!("static {} = ", item_tree_id.item_tree(db)[item_tree_id.value].name)
        }
        DefWithBodyId::ConstId(it) => {
            needs_semi = true;
            let item_tree_id = it.lookup(db).id;
            let name = match &item_tree_id.item_tree(db)[item_tree_id.value].name {
                Some(name) => name.to_string(),
                None => "_".to_string(),
            };
            format!("const {} = ", name)
        }
    };

    let mut p = Printer { body, buf: header, indent_level: 0, needs_indent: false };
    p.print_expr(body.body_expr);
    if needs_semi {
        p.buf.push(';');
    }
    p.buf
}

macro_rules! w {
    ($dst:expr, $($arg:tt)*) => {
        { let _ = write!($dst, $($arg)*); }
    };
}

macro_rules! wln {
    ($dst:expr) => {
        { let _ = writeln!($dst); }
    };
    ($dst:expr, $($arg:tt)*) => {
        { let _ = writeln!($dst, $($arg)*); }
    };
}

struct Printer<'a> {
    body: &'a Body,
    buf: String,
    indent_level: usize,
    needs_indent: bool,
}

impl<'a> Write for Printer<'a> {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        for line in s.split_inclusive('\n') {
            if self.needs_indent {
                match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() {
                    Some('\n') | None => {}
                    _ => self.buf.push('\n'),
                }
                self.buf.push_str(&"    ".repeat(self.indent_level));
                self.needs_indent = false;
            }

            self.buf.push_str(line);
            self.needs_indent = line.ends_with('\n');
        }

        Ok(())
    }
}

impl<'a> Printer<'a> {
    fn indented(&mut self, f: impl FnOnce(&mut Self)) {
        self.indent_level += 1;
        wln!(self);
        f(self);
        self.indent_level -= 1;
        self.buf = self.buf.trim_end_matches('\n').to_string();
    }

    fn whitespace(&mut self) {
        match self.buf.chars().next_back() {
            None | Some('\n' | ' ') => {}
            _ => self.buf.push(' '),
        }
    }

    fn newline(&mut self) {
        match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() {
            Some('\n') | None => {}
            _ => writeln!(self).unwrap(),
        }
    }

    fn print_expr(&mut self, expr: ExprId) {
        let expr = &self.body[expr];

        match expr {
            Expr::Missing => w!(self, "�"),
            Expr::Underscore => w!(self, "_"),
            Expr::Path(path) => self.print_path(path),
            Expr::If { condition, then_branch, else_branch } => {
                w!(self, "if ");
                self.print_expr(*condition);
                w!(self, " ");
                self.print_expr(*then_branch);
                if let Some(els) = *else_branch {
                    w!(self, " else ");
                    self.print_expr(els);
                }
            }
            Expr::Let { pat, expr } => {
                w!(self, "let ");
                self.print_pat(*pat);
                w!(self, " = ");
                self.print_expr(*expr);
            }
            Expr::Loop { body, label } => {
                if let Some(lbl) = label {
                    w!(self, "{}: ", self.body[*lbl].name);
                }
                w!(self, "loop ");
                self.print_expr(*body);
            }
            Expr::While { condition, body, label } => {
                if let Some(lbl) = label {
                    w!(self, "{}: ", self.body[*lbl].name);
                }
                w!(self, "while ");
                self.print_expr(*condition);
                self.print_expr(*body);
            }
            Expr::For { iterable, pat, body, label } => {
                if let Some(lbl) = label {
                    w!(self, "{}: ", self.body[*lbl].name);
                }
                w!(self, "for ");
                self.print_pat(*pat);
                w!(self, " in ");
                self.print_expr(*iterable);
                self.print_expr(*body);
            }
            Expr::Call { callee, args, is_assignee_expr: _ } => {
                self.print_expr(*callee);
                w!(self, "(");
                if !args.is_empty() {
                    self.indented(|p| {
                        for arg in &**args {
                            p.print_expr(*arg);
                            wln!(p, ",");
                        }
                    });
                }
                w!(self, ")");
            }
            Expr::MethodCall { receiver, method_name, args, generic_args } => {
                self.print_expr(*receiver);
                w!(self, ".{}", method_name);
                if let Some(args) = generic_args {
                    w!(self, "::<");
                    print_generic_args(args, self).unwrap();
                    w!(self, ">");
                }
                w!(self, "(");
                if !args.is_empty() {
                    self.indented(|p| {
                        for arg in &**args {
                            p.print_expr(*arg);
                            wln!(p, ",");
                        }
                    });
                }
                w!(self, ")");
            }
            Expr::Match { expr, arms } => {
                w!(self, "match ");
                self.print_expr(*expr);
                w!(self, " {{");
                self.indented(|p| {
                    for arm in &**arms {
                        p.print_pat(arm.pat);
                        if let Some(guard) = arm.guard {
                            w!(p, " if ");
                            p.print_expr(guard);
                        }
                        w!(p, " => ");
                        p.print_expr(arm.expr);
                        wln!(p, ",");
                    }
                });
                wln!(self, "}}");
            }
            Expr::Continue { label } => {
                w!(self, "continue");
                if let Some(label) = label {
                    w!(self, " {}", label);
                }
            }
            Expr::Break { expr, label } => {
                w!(self, "break");
                if let Some(label) = label {
                    w!(self, " {}", label);
                }
                if let Some(expr) = expr {
                    self.whitespace();
                    self.print_expr(*expr);
                }
            }
            Expr::Return { expr } => {
                w!(self, "return");
                if let Some(expr) = expr {
                    self.whitespace();
                    self.print_expr(*expr);
                }
            }
            Expr::Yield { expr } => {
                w!(self, "yield");
                if let Some(expr) = expr {
                    self.whitespace();
                    self.print_expr(*expr);
                }
            }
            Expr::RecordLit { path, fields, spread, ellipsis, is_assignee_expr: _ } => {
                match path {
                    Some(path) => self.print_path(path),
                    None => w!(self, "�"),
                }

                w!(self, "{{");
                self.indented(|p| {
                    for field in &**fields {
                        w!(p, "{}: ", field.name);
                        p.print_expr(field.expr);
                        wln!(p, ",");
                    }
                    if let Some(spread) = spread {
                        w!(p, "..");
                        p.print_expr(*spread);
                        wln!(p);
                    }
                    if *ellipsis {
                        wln!(p, "..");
                    }
                });
                w!(self, "}}");
            }
            Expr::Field { expr, name } => {
                self.print_expr(*expr);
                w!(self, ".{}", name);
            }
            Expr::Await { expr } => {
                self.print_expr(*expr);
                w!(self, ".await");
            }
            Expr::Try { expr } => {
                self.print_expr(*expr);
                w!(self, "?");
            }
            Expr::TryBlock { body } => {
                w!(self, "try ");
                self.print_expr(*body);
            }
            Expr::Async { body } => {
                w!(self, "async ");
                self.print_expr(*body);
            }
            Expr::Const { body } => {
                w!(self, "const ");
                self.print_expr(*body);
            }
            Expr::Cast { expr, type_ref } => {
                self.print_expr(*expr);
                w!(self, " as ");
                self.print_type_ref(type_ref);
            }
            Expr::Ref { expr, rawness, mutability } => {
                w!(self, "&");
                if rawness.is_raw() {
                    w!(self, "raw ");
                }
                if mutability.is_mut() {
                    w!(self, "mut ");
                }
                self.print_expr(*expr);
            }
            Expr::Box { expr } => {
                w!(self, "box ");
                self.print_expr(*expr);
            }
            Expr::UnaryOp { expr, op } => {
                let op = match op {
                    ast::UnaryOp::Deref => "*",
                    ast::UnaryOp::Not => "!",
                    ast::UnaryOp::Neg => "-",
                };
                w!(self, "{}", op);
                self.print_expr(*expr);
            }
            Expr::BinaryOp { lhs, rhs, op } => {
                let (bra, ket) = match op {
                    None | Some(ast::BinaryOp::Assignment { .. }) => ("", ""),
                    _ => ("(", ")"),
                };
                w!(self, "{}", bra);
                self.print_expr(*lhs);
                w!(self, "{} ", ket);
                match op {
                    Some(op) => w!(self, "{}", op),
                    None => w!(self, "�"), // :)
                }
                w!(self, " {}", bra);
                self.print_expr(*rhs);
                w!(self, "{}", ket);
            }
            Expr::Range { lhs, rhs, range_type } => {
                if let Some(lhs) = lhs {
                    w!(self, "(");
                    self.print_expr(*lhs);
                    w!(self, ") ");
                }
                let range = match range_type {
                    ast::RangeOp::Exclusive => "..",
                    ast::RangeOp::Inclusive => "..=",
                };
                w!(self, "{}", range);
                if let Some(rhs) = rhs {
                    w!(self, "(");
                    self.print_expr(*rhs);
                    w!(self, ") ");
                }
            }
            Expr::Index { base, index } => {
                self.print_expr(*base);
                w!(self, "[");
                self.print_expr(*index);
                w!(self, "]");
            }
            Expr::Closure { args, arg_types, ret_type, body } => {
                w!(self, "|");
                for (i, (pat, ty)) in args.iter().zip(arg_types.iter()).enumerate() {
                    if i != 0 {
                        w!(self, ", ");
                    }
                    self.print_pat(*pat);
                    if let Some(ty) = ty {
                        w!(self, ": ");
                        self.print_type_ref(ty);
                    }
                }
                w!(self, "|");
                if let Some(ret_ty) = ret_type {
                    w!(self, " -> ");
                    self.print_type_ref(ret_ty);
                }
                self.whitespace();
                self.print_expr(*body);
            }
            Expr::Tuple { exprs, is_assignee_expr: _ } => {
                w!(self, "(");
                for expr in exprs.iter() {
                    self.print_expr(*expr);
                    w!(self, ", ");
                }
                w!(self, ")");
            }
            Expr::Unsafe { body } => {
                w!(self, "unsafe ");
                self.print_expr(*body);
            }
            Expr::Array(arr) => {
                w!(self, "[");
                if !matches!(arr, Array::ElementList { elements, .. } if elements.is_empty()) {
                    self.indented(|p| match arr {
                        Array::ElementList { elements, is_assignee_expr: _ } => {
                            for elem in elements.iter() {
                                p.print_expr(*elem);
                                w!(p, ", ");
                            }
                        }
                        Array::Repeat { initializer, repeat } => {
                            p.print_expr(*initializer);
                            w!(p, "; ");
                            p.print_expr(*repeat);
                        }
                    });
                    self.newline();
                }
                w!(self, "]");
            }
            Expr::Literal(lit) => self.print_literal(lit),
            Expr::Block { id: _, statements, tail, label } => {
                self.whitespace();
                if let Some(lbl) = label {
                    w!(self, "{}: ", self.body[*lbl].name);
                }
                w!(self, "{{");
                if !statements.is_empty() || tail.is_some() {
                    self.indented(|p| {
                        for stmt in &**statements {
                            p.print_stmt(stmt);
                        }
                        if let Some(tail) = tail {
                            p.print_expr(*tail);
                        }
                        p.newline();
                    });
                }
                w!(self, "}}");
            }
            Expr::MacroStmts { statements, tail } => {
                w!(self, "{{ // macro statements");
                self.indented(|p| {
                    for stmt in statements.iter() {
                        p.print_stmt(stmt);
                    }
                    if let Some(tail) = tail {
                        p.print_expr(*tail);
                    }
                });
                self.newline();
                w!(self, "}}");
            }
        }
    }

    fn print_pat(&mut self, pat: PatId) {
        let pat = &self.body[pat];

        match pat {
            Pat::Missing => w!(self, "�"),
            Pat::Wild => w!(self, "_"),
            Pat::Tuple { args, ellipsis } => {
                w!(self, "(");
                for (i, pat) in args.iter().enumerate() {
                    if i != 0 {
                        w!(self, ", ");
                    }
                    if *ellipsis == Some(i) {
                        w!(self, ".., ");
                    }
                    self.print_pat(*pat);
                }
                w!(self, ")");
            }
            Pat::Or(pats) => {
                for (i, pat) in pats.iter().enumerate() {
                    if i != 0 {
                        w!(self, " | ");
                    }
                    self.print_pat(*pat);
                }
            }
            Pat::Record { path, args, ellipsis } => {
                match path {
                    Some(path) => self.print_path(path),
                    None => w!(self, "�"),
                }

                w!(self, " {{");
                self.indented(|p| {
                    for arg in args.iter() {
                        w!(p, "{}: ", arg.name);
                        p.print_pat(arg.pat);
                        wln!(p, ",");
                    }
                    if *ellipsis {
                        wln!(p, "..");
                    }
                });
                w!(self, "}}");
            }
            Pat::Range { start, end } => {
                self.print_expr(*start);
                w!(self, "...");
                self.print_expr(*end);
            }
            Pat::Slice { prefix, slice, suffix } => {
                w!(self, "[");
                for pat in prefix.iter() {
                    self.print_pat(*pat);
                    w!(self, ", ");
                }
                if let Some(pat) = slice {
                    self.print_pat(*pat);
                    w!(self, ", ");
                }
                for pat in suffix.iter() {
                    self.print_pat(*pat);
                    w!(self, ", ");
                }
                w!(self, "]");
            }
            Pat::Path(path) => self.print_path(path),
            Pat::Lit(expr) => self.print_expr(*expr),
            Pat::Bind { mode, name, subpat } => {
                let mode = match mode {
                    BindingAnnotation::Unannotated => "",
                    BindingAnnotation::Mutable => "mut ",
                    BindingAnnotation::Ref => "ref ",
                    BindingAnnotation::RefMut => "ref mut ",
                };
                w!(self, "{}{}", mode, name);
                if let Some(pat) = subpat {
                    self.whitespace();
                    self.print_pat(*pat);
                }
            }
            Pat::TupleStruct { path, args, ellipsis } => {
                match path {
                    Some(path) => self.print_path(path),
                    None => w!(self, "�"),
                }
                w!(self, "(");
                for (i, arg) in args.iter().enumerate() {
                    if i != 0 {
                        w!(self, ", ");
                    }
                    if *ellipsis == Some(i) {
                        w!(self, ", ..");
                    }
                    self.print_pat(*arg);
                }
                w!(self, ")");
            }
            Pat::Ref { pat, mutability } => {
                w!(self, "&");
                if mutability.is_mut() {
                    w!(self, "mut ");
                }
                self.print_pat(*pat);
            }
            Pat::Box { inner } => {
                w!(self, "box ");
                self.print_pat(*inner);
            }
            Pat::ConstBlock(c) => {
                w!(self, "const ");
                self.print_expr(*c);
            }
        }
    }

    fn print_stmt(&mut self, stmt: &Statement) {
        match stmt {
            Statement::Let { pat, type_ref, initializer, else_branch } => {
                w!(self, "let ");
                self.print_pat(*pat);
                if let Some(ty) = type_ref {
                    w!(self, ": ");
                    self.print_type_ref(ty);
                }
                if let Some(init) = initializer {
                    w!(self, " = ");
                    self.print_expr(*init);
                }
                if let Some(els) = else_branch {
                    w!(self, " else ");
                    self.print_expr(*els);
                }
                wln!(self, ";");
            }
            Statement::Expr { expr, has_semi } => {
                self.print_expr(*expr);
                if *has_semi {
                    w!(self, ";");
                }
                wln!(self);
            }
        }
    }

    fn print_literal(&mut self, literal: &Literal) {
        match literal {
            Literal::String(it) => w!(self, "{:?}", it),
            Literal::ByteString(it) => w!(self, "\"{}\"", it.escape_ascii()),
            Literal::Char(it) => w!(self, "'{}'", it.escape_debug()),
            Literal::Bool(it) => w!(self, "{}", it),
            Literal::Int(i, suffix) => {
                w!(self, "{}", i);
                if let Some(suffix) = suffix {
                    w!(self, "{}", suffix);
                }
            }
            Literal::Uint(i, suffix) => {
                w!(self, "{}", i);
                if let Some(suffix) = suffix {
                    w!(self, "{}", suffix);
                }
            }
            Literal::Float(f, suffix) => {
                w!(self, "{}", f);
                if let Some(suffix) = suffix {
                    w!(self, "{}", suffix);
                }
            }
        }
    }

    fn print_type_ref(&mut self, ty: &TypeRef) {
        print_type_ref(ty, self).unwrap();
    }

    fn print_path(&mut self, path: &Path) {
        print_path(path, self).unwrap();
    }
}
@@ -156,3 +156,38 @@ impl BuiltinFloat {
         Some(res)
     }
 }
+
+impl fmt::Display for BuiltinInt {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(match self {
+            BuiltinInt::Isize => "isize",
+            BuiltinInt::I8 => "i8",
+            BuiltinInt::I16 => "i16",
+            BuiltinInt::I32 => "i32",
+            BuiltinInt::I64 => "i64",
+            BuiltinInt::I128 => "i128",
+        })
+    }
+}
+
+impl fmt::Display for BuiltinUint {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(match self {
+            BuiltinUint::Usize => "usize",
+            BuiltinUint::U8 => "u8",
+            BuiltinUint::U16 => "u16",
+            BuiltinUint::U32 => "u32",
+            BuiltinUint::U64 => "u64",
+            BuiltinUint::U128 => "u128",
+        })
+    }
+}
+
+impl fmt::Display for BuiltinFloat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(match self {
+            BuiltinFloat::F32 => "f32",
+            BuiltinFloat::F64 => "f64",
+        })
+    }
+}
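These Display impls let builtin numeric types be written out with their source-level names. A minimal sketch of the behaviour they provide (standard to_string via Display):

    // Sketch: each builtin formats as its Rust keyword spelling.
    assert_eq!(BuiltinInt::I32.to_string(), "i32");
    assert_eq!(BuiltinUint::Usize.to_string(), "usize");
    assert_eq!(BuiltinFloat::F64.to_string(), "f64");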
@@ -12,6 +12,8 @@
 //!
 //! See also a neighboring `body` module.
 
+use std::fmt;
+
 use hir_expand::name::Name;
 use la_arena::{Idx, RawIdx};
 
@@ -52,8 +54,8 @@ impl FloatTypeWrapper {
     }
 }
 
-impl std::fmt::Display for FloatTypeWrapper {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+impl fmt::Display for FloatTypeWrapper {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{:?}", f64::from_bits(self.0))
     }
 }
@@ -2,13 +2,10 @@
 
 use std::fmt::{self, Write};
 
-use itertools::Itertools;
-
 use crate::{
     attr::RawAttrs,
     generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget},
-    path::GenericArg,
-    type_ref::TraitBoundModifier,
+    pretty::{print_path, print_type_bounds, print_type_ref},
     visibility::RawVisibility,
 };
 
@@ -464,183 +461,15 @@ impl<'a> Printer<'a> {
     }
 
     fn print_type_ref(&mut self, type_ref: &TypeRef) {
-        // FIXME: deduplicate with `HirDisplay` impl
-        match type_ref {
-            TypeRef::Never => w!(self, "!"),
-            TypeRef::Placeholder => w!(self, "_"),
-            TypeRef::Tuple(fields) => {
-                w!(self, "(");
-                for (i, field) in fields.iter().enumerate() {
-                    if i != 0 {
-                        w!(self, ", ");
-                    }
-                    self.print_type_ref(field);
-                }
-                w!(self, ")");
-            }
-            TypeRef::Path(path) => self.print_path(path),
-            TypeRef::RawPtr(pointee, mtbl) => {
-                let mtbl = match mtbl {
-                    Mutability::Shared => "*const",
-                    Mutability::Mut => "*mut",
-                };
-                w!(self, "{} ", mtbl);
-                self.print_type_ref(pointee);
-            }
-            TypeRef::Reference(pointee, lt, mtbl) => {
-                let mtbl = match mtbl {
-                    Mutability::Shared => "",
-                    Mutability::Mut => "mut ",
-                };
-                w!(self, "&");
-                if let Some(lt) = lt {
-                    w!(self, "{} ", lt.name);
-                }
-                w!(self, "{}", mtbl);
-                self.print_type_ref(pointee);
-            }
-            TypeRef::Array(elem, len) => {
-                w!(self, "[");
-                self.print_type_ref(elem);
-                w!(self, "; {}]", len);
-            }
-            TypeRef::Slice(elem) => {
-                w!(self, "[");
-                self.print_type_ref(elem);
-                w!(self, "]");
-            }
-            TypeRef::Fn(args_and_ret, varargs) => {
-                let ((_, return_type), args) =
-                    args_and_ret.split_last().expect("TypeRef::Fn is missing return type");
-                w!(self, "fn(");
-                for (i, (_, typeref)) in args.iter().enumerate() {
-                    if i != 0 {
-                        w!(self, ", ");
-                    }
-                    self.print_type_ref(typeref);
-                }
-                if *varargs {
-                    if !args.is_empty() {
-                        w!(self, ", ");
-                    }
-                    w!(self, "...");
-                }
-                w!(self, ") -> ");
-                self.print_type_ref(return_type);
-            }
-            TypeRef::Macro(_ast_id) => {
-                w!(self, "<macro>");
-            }
-            TypeRef::Error => w!(self, "{{unknown}}"),
-            TypeRef::ImplTrait(bounds) => {
-                w!(self, "impl ");
-                self.print_type_bounds(bounds);
-            }
-            TypeRef::DynTrait(bounds) => {
-                w!(self, "dyn ");
-                self.print_type_bounds(bounds);
-            }
-        }
+        print_type_ref(type_ref, self).unwrap();
     }
 
     fn print_type_bounds(&mut self, bounds: &[Interned<TypeBound>]) {
-        for (i, bound) in bounds.iter().enumerate() {
-            if i != 0 {
-                w!(self, " + ");
-            }
-
-            match bound.as_ref() {
-                TypeBound::Path(path, modifier) => {
-                    match modifier {
-                        TraitBoundModifier::None => (),
-                        TraitBoundModifier::Maybe => w!(self, "?"),
-                    }
-                    self.print_path(path)
-                }
-                TypeBound::ForLifetime(lifetimes, path) => {
-                    w!(self, "for<{}> ", lifetimes.iter().format(", "));
-                    self.print_path(path);
-                }
-                TypeBound::Lifetime(lt) => w!(self, "{}", lt.name),
-                TypeBound::Error => w!(self, "{{unknown}}"),
-            }
-        }
+        print_type_bounds(bounds, self).unwrap();
     }
 
     fn print_path(&mut self, path: &Path) {
-        match path.type_anchor() {
-            Some(anchor) => {
-                w!(self, "<");
-                self.print_type_ref(anchor);
-                w!(self, ">::");
-            }
-            None => match path.kind() {
-                PathKind::Plain => {}
-                PathKind::Super(0) => w!(self, "self::"),
-                PathKind::Super(n) => {
-                    for _ in 0..*n {
-                        w!(self, "super::");
-                    }
-                }
-                PathKind::Crate => w!(self, "crate::"),
-                PathKind::Abs => w!(self, "::"),
-                PathKind::DollarCrate(_) => w!(self, "$crate::"),
-            },
-        }
-
-        for (i, segment) in path.segments().iter().enumerate() {
-            if i != 0 {
-                w!(self, "::");
-            }
-
-            w!(self, "{}", segment.name);
-            if let Some(generics) = segment.args_and_bindings {
-                // NB: these are all in type position, so `::<` turbofish syntax is not necessary
-                w!(self, "<");
-                let mut first = true;
-                let args = if generics.has_self_type {
-                    let (self_ty, args) = generics.args.split_first().unwrap();
-                    w!(self, "Self=");
-                    self.print_generic_arg(self_ty);
-                    first = false;
-                    args
-                } else {
-                    &generics.args
-                };
-                for arg in args {
-                    if !first {
-                        w!(self, ", ");
-                    }
-                    first = false;
-                    self.print_generic_arg(arg);
-                }
-                for binding in &generics.bindings {
-                    if !first {
-                        w!(self, ", ");
-                    }
-                    first = false;
-                    w!(self, "{}", binding.name);
-                    if !binding.bounds.is_empty() {
-                        w!(self, ": ");
-                        self.print_type_bounds(&binding.bounds);
-                    }
-                    if let Some(ty) = &binding.type_ref {
-                        w!(self, " = ");
-                        self.print_type_ref(ty);
-                    }
-                }
-
-                w!(self, ">");
-            }
-        }
-    }
-
-    fn print_generic_arg(&mut self, arg: &GenericArg) {
-        match arg {
-            GenericArg::Type(ty) => self.print_type_ref(ty),
-            GenericArg::Const(c) => w!(self, "{}", c),
-            GenericArg::Lifetime(lt) => w!(self, "{}", lt.name),
-        }
+        print_path(path, self).unwrap();
     }
 
     fn print_generic_params(&mut self, params: &GenericParams) {
@@ -283,10 +283,10 @@ struct S {
 "#,
             expect![[r#"
                 pub(self) struct S {
-                    pub(self) a: Mixed<'a, T, Item = (), OtherItem = u8>,
-                    pub(self) b: Qualified<Self=Fully>::Syntax,
-                    pub(self) c: <TypeAnchored>::Path<'a>,
-                    pub(self) d: dyn for<'a> Trait<'a>,
+                    pub(self) a: Mixed::<'a, T, Item = (), OtherItem = u8>,
+                    pub(self) b: Qualified::<Self=Fully>::Syntax,
+                    pub(self) c: <TypeAnchored>::Path::<'a>,
+                    pub(self) d: dyn for<'a> Trait::<'a>,
                 }
             "#]],
         )
@@ -329,7 +329,7 @@ trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {}
                 T: Copy,
                 U: ?Sized;
 
-            impl<'a, 'b, T, const K: u8> S<'a, 'b, T, K>
+            impl<'a, 'b, T, const K: u8> S::<'a, 'b, T, K>
             where
                 T: Copy,
                 T: 'a,
@@ -352,7 +352,7 @@ trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {}
             where
                 Self: Super,
                 T: 'a,
-                Self: for<'a> Tr<'a, T>
+                Self: for<'a> Tr::<'a, T>
             {
             }
         "#]],
@@ -53,6 +53,7 @@ pub mod import_map;
 mod test_db;
 #[cfg(test)]
 mod macro_expansion_tests;
+mod pretty;
 
 use std::{
     hash::{Hash, Hasher},
@@ -399,14 +399,15 @@ impl DefMap {
                 Some(_) | None => from_scope.or(from_builtin),
             },
         };
-        let from_extern_prelude = self
-            .extern_prelude
-            .get(name)
-            .map_or(PerNs::none(), |&it| PerNs::types(it.into(), Visibility::Public));
-
-        let from_prelude = self.resolve_in_prelude(db, name);
+        let extern_prelude = || {
+            self.extern_prelude
+                .get(name)
+                .map_or(PerNs::none(), |&it| PerNs::types(it.into(), Visibility::Public))
+        };
+        let prelude = || self.resolve_in_prelude(db, name);
 
-        from_legacy_macro.or(from_scope_or_builtin).or(from_extern_prelude).or(from_prelude)
+        from_legacy_macro.or(from_scope_or_builtin).or_else(extern_prelude).or_else(prelude)
     }
 
     fn resolve_name_in_crate_root_or_extern_prelude(
@@ -414,20 +415,19 @@ impl DefMap {
         db: &dyn DefDatabase,
         name: &Name,
     ) -> PerNs {
-        let arc;
-        let crate_def_map = match self.block {
+        let from_crate_root = match self.block {
             Some(_) => {
-                arc = self.crate_root(db).def_map(db);
-                &arc
+                let def_map = self.crate_root(db).def_map(db);
+                def_map[def_map.root].scope.get(name)
             }
-            None => self,
+            None => self[self.root].scope.get(name),
+        };
+        let from_extern_prelude = || {
+            self.resolve_name_in_extern_prelude(db, name)
+                .map_or(PerNs::none(), |it| PerNs::types(it.into(), Visibility::Public))
         };
-        let from_crate_root = crate_def_map[crate_def_map.root].scope.get(name);
-        let from_extern_prelude = self
-            .resolve_name_in_extern_prelude(db, name)
-            .map_or(PerNs::none(), |it| PerNs::types(it.into(), Visibility::Public));
 
-        from_crate_root.or(from_extern_prelude)
+        from_crate_root.or_else(from_extern_prelude)
     }
 
     fn resolve_in_prelude(&self, db: &dyn DefDatabase, name: &Name) -> PerNs {
@@ -45,7 +45,7 @@ impl Attrs {
                 kind: ProcMacroKind::CustomDerive { helpers: Box::new([]) },
             }),
 
-            // `#[proc_macro_derive(Trait, attibutes(helper1, helper2, ...))]`
+            // `#[proc_macro_derive(Trait, attributes(helper1, helper2, ...))]`
             [
                 TokenTree::Leaf(Leaf::Ident(trait_name)),
                 TokenTree::Leaf(Leaf::Punct(comma)),
@@ -43,6 +43,10 @@ impl PerNs {
         self.types.is_none() && self.values.is_none() && self.macros.is_none()
     }
 
+    pub fn is_full(&self) -> bool {
+        self.types.is_some() && self.values.is_some() && self.macros.is_some()
+    }
+
     pub fn take_types(self) -> Option<ModuleDefId> {
         self.types.map(|it| it.0)
     }
@@ -84,6 +88,14 @@ impl PerNs {
         }
     }
 
+    pub fn or_else(self, f: impl FnOnce() -> PerNs) -> PerNs {
+        if self.is_full() {
+            self
+        } else {
+            self.or(f())
+        }
+    }
+
     pub fn iter_items(self) -> impl Iterator<Item = ItemInNs> {
         let _p = profile::span("PerNs::iter_items");
         self.types
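The `or_else` added above mirrors `Option::or_else`: the fallback is only computed when the receiver still has an empty namespace, which is what lets the resolution hunks earlier in this diff hide the extern-prelude and prelude lookups behind closures. A minimal, self-contained sketch of the same lazy-fallback idea — the `Ns` type and values here are illustrative stand-ins, not rust-analyzer's real `PerNs`:

```rust
// Simplified stand-in for PerNs: three optional namespaces.
#[derive(Clone, Copy, Default)]
struct Ns {
    types: Option<u32>,
    values: Option<u32>,
    macros: Option<u32>,
}

impl Ns {
    fn is_full(&self) -> bool {
        self.types.is_some() && self.values.is_some() && self.macros.is_some()
    }
    // Eager merge: fills in whatever is missing from `other`.
    fn or(self, other: Ns) -> Ns {
        Ns {
            types: self.types.or(other.types),
            values: self.values.or(other.values),
            macros: self.macros.or(other.macros),
        }
    }
    // Lazy variant: only runs the fallback when something is still missing.
    fn or_else(self, f: impl FnOnce() -> Ns) -> Ns {
        if self.is_full() {
            self
        } else {
            self.or(f())
        }
    }
}

fn main() {
    let full = Ns { types: Some(1), values: Some(2), macros: Some(3) };
    // The closure is never called here, so an expensive lookup would be skipped.
    let r = full.or_else(|| panic!("not evaluated"));
    assert!(r.is_full());

    let partial = Ns { types: Some(1), ..Ns::default() };
    let r = partial.or_else(|| Ns { values: Some(9), ..Ns::default() });
    assert_eq!(r.values, Some(9));
}
```

With the eager `or` used before this change, the fallback expression was evaluated even when the first lookup had already produced a full result.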
209 crates/hir-def/src/pretty.rs Normal file
@@ -0,0 +1,209 @@
+//! Display and pretty printing routines.
+
+use std::fmt::{self, Write};
+
+use hir_expand::mod_path::PathKind;
+use itertools::Itertools;
+
+use crate::{
+    intern::Interned,
+    path::{GenericArg, GenericArgs, Path},
+    type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef},
+};
+
+pub(crate) fn print_path(path: &Path, buf: &mut dyn Write) -> fmt::Result {
+    match path.type_anchor() {
+        Some(anchor) => {
+            write!(buf, "<")?;
+            print_type_ref(anchor, buf)?;
+            write!(buf, ">::")?;
+        }
+        None => match path.kind() {
+            PathKind::Plain => {}
+            PathKind::Super(0) => write!(buf, "self")?,
+            PathKind::Super(n) => {
+                for i in 0..*n {
+                    if i == 0 {
+                        buf.write_str("super")?;
+                    } else {
+                        buf.write_str("::super")?;
+                    }
+                }
+            }
+            PathKind::Crate => write!(buf, "crate")?,
+            PathKind::Abs => {}
+            PathKind::DollarCrate(_) => write!(buf, "$crate")?,
+        },
+    }
+
+    for (i, segment) in path.segments().iter().enumerate() {
+        if i != 0 || !matches!(path.kind(), PathKind::Plain) {
+            write!(buf, "::")?;
+        }
+
+        write!(buf, "{}", segment.name)?;
+        if let Some(generics) = segment.args_and_bindings {
+            write!(buf, "::<")?;
+            print_generic_args(generics, buf)?;
+
+            write!(buf, ">")?;
+        }
+    }
+
+    Ok(())
+}
+
+pub(crate) fn print_generic_args(generics: &GenericArgs, buf: &mut dyn Write) -> fmt::Result {
+    let mut first = true;
+    let args = if generics.has_self_type {
+        let (self_ty, args) = generics.args.split_first().unwrap();
+        write!(buf, "Self=")?;
+        print_generic_arg(self_ty, buf)?;
+        first = false;
+        args
+    } else {
+        &generics.args
+    };
+    for arg in args {
+        if !first {
+            write!(buf, ", ")?;
+        }
+        first = false;
+        print_generic_arg(arg, buf)?;
+    }
+    for binding in &generics.bindings {
+        if !first {
+            write!(buf, ", ")?;
+        }
+        first = false;
+        write!(buf, "{}", binding.name)?;
+        if !binding.bounds.is_empty() {
+            write!(buf, ": ")?;
+            print_type_bounds(&binding.bounds, buf)?;
+        }
+        if let Some(ty) = &binding.type_ref {
+            write!(buf, " = ")?;
+            print_type_ref(ty, buf)?;
+        }
+    }
+    Ok(())
+}
+
+pub(crate) fn print_generic_arg(arg: &GenericArg, buf: &mut dyn Write) -> fmt::Result {
+    match arg {
+        GenericArg::Type(ty) => print_type_ref(ty, buf),
+        GenericArg::Const(c) => write!(buf, "{}", c),
+        GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name),
+    }
+}
+
+pub(crate) fn print_type_ref(type_ref: &TypeRef, buf: &mut dyn Write) -> fmt::Result {
+    // FIXME: deduplicate with `HirDisplay` impl
+    match type_ref {
+        TypeRef::Never => write!(buf, "!")?,
+        TypeRef::Placeholder => write!(buf, "_")?,
+        TypeRef::Tuple(fields) => {
+            write!(buf, "(")?;
+            for (i, field) in fields.iter().enumerate() {
+                if i != 0 {
+                    write!(buf, ", ")?;
+                }
+                print_type_ref(field, buf)?;
+            }
+            write!(buf, ")")?;
+        }
+        TypeRef::Path(path) => print_path(path, buf)?,
+        TypeRef::RawPtr(pointee, mtbl) => {
+            let mtbl = match mtbl {
+                Mutability::Shared => "*const",
+                Mutability::Mut => "*mut",
+            };
+            write!(buf, "{} ", mtbl)?;
+            print_type_ref(pointee, buf)?;
+        }
+        TypeRef::Reference(pointee, lt, mtbl) => {
+            let mtbl = match mtbl {
+                Mutability::Shared => "",
+                Mutability::Mut => "mut ",
+            };
+            write!(buf, "&")?;
+            if let Some(lt) = lt {
+                write!(buf, "{} ", lt.name)?;
+            }
+            write!(buf, "{}", mtbl)?;
+            print_type_ref(pointee, buf)?;
+        }
+        TypeRef::Array(elem, len) => {
+            write!(buf, "[")?;
+            print_type_ref(elem, buf)?;
+            write!(buf, "; {}]", len)?;
+        }
+        TypeRef::Slice(elem) => {
+            write!(buf, "[")?;
+            print_type_ref(elem, buf)?;
+            write!(buf, "]")?;
+        }
+        TypeRef::Fn(args_and_ret, varargs) => {
+            let ((_, return_type), args) =
+                args_and_ret.split_last().expect("TypeRef::Fn is missing return type");
+            write!(buf, "fn(")?;
+            for (i, (_, typeref)) in args.iter().enumerate() {
+                if i != 0 {
+                    write!(buf, ", ")?;
+                }
+                print_type_ref(typeref, buf)?;
+            }
+            if *varargs {
+                if !args.is_empty() {
+                    write!(buf, ", ")?;
+                }
+                write!(buf, "...")?;
+            }
+            write!(buf, ") -> ")?;
+            print_type_ref(return_type, buf)?;
+        }
+        TypeRef::Macro(_ast_id) => {
+            write!(buf, "<macro>")?;
+        }
+        TypeRef::Error => write!(buf, "{{unknown}}")?,
+        TypeRef::ImplTrait(bounds) => {
+            write!(buf, "impl ")?;
+            print_type_bounds(bounds, buf)?;
+        }
+        TypeRef::DynTrait(bounds) => {
+            write!(buf, "dyn ")?;
+            print_type_bounds(bounds, buf)?;
+        }
+    }
+
+    Ok(())
+}
+
+pub(crate) fn print_type_bounds(
+    bounds: &[Interned<TypeBound>],
+    buf: &mut dyn Write,
+) -> fmt::Result {
+    for (i, bound) in bounds.iter().enumerate() {
+        if i != 0 {
+            write!(buf, " + ")?;
+        }
+
+        match bound.as_ref() {
+            TypeBound::Path(path, modifier) => {
+                match modifier {
+                    TraitBoundModifier::None => (),
+                    TraitBoundModifier::Maybe => write!(buf, "?")?,
+                }
+                print_path(path, buf)?;
+            }
+            TypeBound::ForLifetime(lifetimes, path) => {
+                write!(buf, "for<{}> ", lifetimes.iter().format(", "))?;
+                print_path(path, buf)?;
+            }
+            TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name)?,
+            TypeBound::Error => write!(buf, "{{unknown}}")?,
+        }
+    }
+
+    Ok(())
+}
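The new module above writes through `&mut dyn fmt::Write` instead of a concrete printer, so the same routines can render into a plain `String`, a `Formatter`, or the item-tree printer shown earlier. A small standalone sketch of that pattern against the standard `std::fmt::Write` trait — `print_tuple` is illustrative and not part of the crate:

```rust
use std::fmt::{self, Write};

// A printer written against `&mut dyn Write`, so it can target any fmt sink.
fn print_tuple(fields: &[&str], buf: &mut dyn Write) -> fmt::Result {
    write!(buf, "(")?;
    for (i, field) in fields.iter().enumerate() {
        if i != 0 {
            write!(buf, ", ")?;
        }
        write!(buf, "{}", field)?;
    }
    write!(buf, ")")
}

fn main() -> fmt::Result {
    // `String` implements `fmt::Write`, so it can serve as the output buffer.
    let mut out = String::new();
    print_tuple(&["u8", "bool"], &mut out)?;
    assert_eq!(out, "(u8, bool)");
    Ok(())
}
```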
@@ -77,6 +77,10 @@ impl Rawness {
             Rawness::Ref
         }
     }
+
+    pub fn is_raw(&self) -> bool {
+        matches!(self, Self::RawPtr)
+    }
 }
 
 #[derive(Clone, PartialEq, Eq, Hash, Debug)]
@@ -15,7 +15,7 @@ use std::{
 use la_arena::{Arena, Idx};
 use profile::Count;
 use rustc_hash::FxHasher;
-use syntax::{ast, match_ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
+use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
 
 /// `AstId` points to an AST node in a specific file.
 pub struct FileAstId<N: AstNode> {
@@ -92,18 +92,12 @@ impl AstIdMap {
         // change parent's id. This means that, say, adding a new function to a
         // trait does not change ids of top-level items, which helps caching.
         bdfs(node, |it| {
-            match_ast! {
-                match it {
-                    ast::Item(module_item) => {
-                        res.alloc(module_item.syntax());
-                        true
-                    },
-                    ast::BlockExpr(block) => {
-                        res.alloc(block.syntax());
-                        true
-                    },
-                    _ => false,
-                }
+            let kind = it.kind();
+            if ast::Item::can_cast(kind) || ast::BlockExpr::can_cast(kind) {
+                res.alloc(&it);
+                true
+            } else {
+                false
             }
         });
         res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
@@ -123,6 +117,7 @@ impl AstIdMap {
         let raw = self.erased_ast_id(item.syntax());
         FileAstId { raw, _ty: PhantomData }
     }
 
     fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
         let ptr = SyntaxNodePtr::new(item);
         let hash = hash_ptr(&ptr);
@@ -321,7 +321,11 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
             ast::Item::cast(node.clone())?
                 .attrs()
                 .take(derive_attr_index as usize + 1)
-                // FIXME
+                // FIXME, this resolution should not be done syntactically
+                // derive is a proper macro now, no longer builtin
+                // But we do not have resolution at this stage, this means
+                // we need to know about all macro calls for the given ast item here
+                // so we require some kind of mapping...
                 .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
                 .map(|it| it.syntax().clone())
                 .collect()
@@ -130,7 +130,6 @@ pub struct MacroDefId {
 pub enum MacroDefKind {
     Declarative(AstId<ast::Macro>),
     BuiltIn(BuiltinFnLikeExpander, AstId<ast::Macro>),
-    // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander
     BuiltInAttr(BuiltinAttrExpander, AstId<ast::Macro>),
     BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>),
     BuiltInEager(EagerExpander, AstId<ast::Macro>),
@@ -617,7 +616,7 @@ impl ExpansionInfo {
 
         let token_id = match token_id_in_attr_input {
             Some(token_id) => token_id,
-            // the token is not inside an attribute's input so do the lookup in the macro_arg as ususal
+            // the token is not inside an attribute's input so do the lookup in the macro_arg as usual
             None => {
                 let relative_range =
                     token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
@@ -257,6 +257,7 @@ macro_rules! __known_path {
     (core::ops::RangeToInclusive) => {};
     (core::ops::RangeInclusive) => {};
     (core::future::Future) => {};
+    (core::future::IntoFuture) => {};
     (core::ops::Try) => {};
     ($path:path) => {
         compile_error!("Please register your known path in the path module")
@@ -90,10 +90,16 @@ impl Name {
 
     /// Resolve a name from the text of token.
     fn resolve(raw_text: &str) -> Name {
-        // When `raw_text` starts with "r#" but the name does not coincide with any
-        // keyword, we never need the prefix so we strip it.
         match raw_text.strip_prefix("r#") {
+            // When `raw_text` starts with "r#" but the name does not coincide with any
+            // keyword, we never need the prefix so we strip it.
             Some(text) if !is_raw_identifier(text) => Name::new_text(SmolStr::new(text)),
+            // Keywords (in the current edition) *can* be used as a name in earlier editions of
+            // Rust, e.g. "try" in Rust 2015. Even in such cases, we keep track of them in their
+            // escaped form.
+            None if is_raw_identifier(raw_text) => {
+                Name::new_text(SmolStr::from_iter(["r#", raw_text]))
+            }
             _ => Name::new_text(raw_text.into()),
         }
     }
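A standalone sketch of the `r#` normalization rule the hunk above implements: strip the prefix when it is not needed, and add it back when a bare keyword is used as a name. The keyword list below is deliberately tiny and hypothetical; the real `is_raw_identifier` check is per-edition and not shown here:

```rust
// Illustrative stand-in for the per-edition keyword check.
fn is_raw_identifier(name: &str) -> bool {
    matches!(name, "try" | "dyn" | "async" | "await")
}

fn normalize(raw_text: &str) -> String {
    match raw_text.strip_prefix("r#") {
        // "r#foo" where `foo` is not a keyword: the prefix is never needed.
        Some(text) if !is_raw_identifier(text) => text.to_string(),
        // A bare keyword used as a name (valid in older editions): keep it escaped.
        None if is_raw_identifier(raw_text) => format!("r#{raw_text}"),
        _ => raw_text.to_string(),
    }
}

fn main() {
    assert_eq!(normalize("r#foo"), "foo");
    assert_eq!(normalize("try"), "r#try");
    assert_eq!(normalize("r#try"), "r#try");
    assert_eq!(normalize("bar"), "bar");
}
```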
@@ -260,6 +266,7 @@ pub mod known {
         Try,
         Ok,
         Future,
+        IntoFuture,
         Result,
         Option,
         Output,
@@ -393,6 +400,7 @@ pub mod known {
         future_trait,
         index,
         index_mut,
+        into_future,
         mul_assign,
         mul,
         neg,
@@ -18,9 +18,9 @@ ena = "0.14.0"
 tracing = "0.1.35"
 rustc-hash = "1.1.0"
 scoped-tls = "1.0.0"
-chalk-solve = { version = "0.83.0", default-features = false }
-chalk-ir = "0.83.0"
-chalk-recursive = { version = "0.83.0", default-features = false }
+chalk-solve = { version = "0.84.0", default-features = false }
+chalk-ir = "0.84.0"
+chalk-recursive = { version = "0.84.0", default-features = false }
 la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
 once_cell = "1.12.0"
 typed-arena = "2.0.1"
@@ -734,6 +734,7 @@ impl<'a> InferenceContext<'a> {
                     let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                     return (ty, Some(strukt.into()));
                 }
+                ValueNs::ImplSelf(impl_id) => (TypeNs::SelfType(impl_id), None),
                 _ => return (self.err_ty(), None),
             },
             Some(ResolveValueResult::Partial(typens, unresolved)) => (typens, Some(unresolved)),
@@ -875,7 +876,10 @@ impl<'a> InferenceContext<'a> {
     }
 
     fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
-        let trait_ = self.resolve_lang_item(name![future_trait])?.as_trait()?;
+        let trait_ = self
+            .resolver
+            .resolve_known_trait(self.db.upcast(), &path![core::future::IntoFuture])
+            .or_else(|| self.resolve_lang_item(name![future_trait])?.as_trait())?;
         self.db.trait_data(trait_).associated_type_by_name(&name![Output])
     }
@@ -238,18 +238,7 @@ impl<'a> TyLoweringContext<'a> {
                 })
                 .intern(Interner)
             }
-            TypeRef::DynTrait(bounds) => {
-                let self_ty =
-                    TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
-                let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
-                    QuantifiedWhereClauses::from_iter(
-                        Interner,
-                        bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)),
-                    )
-                });
-                let bounds = crate::make_single_type_binders(bounds);
-                TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner)
-            }
+            TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds),
             TypeRef::ImplTrait(bounds) => {
                 match self.impl_trait_mode {
                     ImplTraitLoweringMode::Opaque => {
@@ -468,29 +457,10 @@ impl<'a> TyLoweringContext<'a> {
                     }
                 }
                 0 => {
-                    let self_ty = Some(
-                        TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
-                            .intern(Interner),
-                    );
-                    let trait_ref = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
-                        ctx.lower_trait_ref_from_resolved_path(
-                            trait_,
-                            resolved_segment,
-                            self_ty,
-                        )
-                    });
-                    let dyn_ty = DynTy {
-                        bounds: crate::make_single_type_binders(
-                            QuantifiedWhereClauses::from_iter(
-                                Interner,
-                                Some(crate::wrap_empty_binders(WhereClause::Implemented(
-                                    trait_ref,
-                                ))),
-                            ),
-                        ),
-                        lifetime: static_lifetime(),
-                    };
-                    TyKind::Dyn(dyn_ty).intern(Interner)
+                    // Trait object type without dyn; this should be handled in upstream. See
+                    // `lower_path()`.
+                    stdx::never!("unexpected fully resolved trait path");
+                    TyKind::Error.intern(Interner)
                 }
                 _ => {
                     // FIXME report error (ambiguous associated type)
@@ -555,11 +525,20 @@ impl<'a> TyLoweringContext<'a> {
             let (ty, res) = self.lower_ty_ext(type_ref);
             return self.lower_ty_relative_path(ty, res, path.segments());
         }
 
         let (resolution, remaining_index) =
             match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
                 Some(it) => it,
                 None => return (TyKind::Error.intern(Interner), None),
             };
+
+        if matches!(resolution, TypeNs::TraitId(_)) && remaining_index.is_none() {
+            // trait object type without dyn
+            let bound = TypeBound::Path(path.clone(), TraitBoundModifier::None);
+            let ty = self.lower_dyn_trait(&[Interned::new(bound)]);
+            return (ty, None);
+        }
+
         let (resolved_segment, remaining_segments) = match remaining_index {
             None => (
                 path.segments().last().expect("resolved path has at least one element"),
@@ -987,6 +966,18 @@ impl<'a> TyLoweringContext<'a> {
         })
     }
 
+    fn lower_dyn_trait(&self, bounds: &[Interned<TypeBound>]) -> Ty {
+        let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
+        let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+            QuantifiedWhereClauses::from_iter(
+                Interner,
+                bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)),
+            )
+        });
+        let bounds = crate::make_single_type_binders(bounds);
+        TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner)
+    }
+
     fn lower_impl_trait(
         &self,
         bounds: &[Interned<TypeBound>],
@@ -1064,6 +1064,14 @@ pub fn resolve_indexing_op(
     None
 }
 
+macro_rules! check_that {
+    ($cond:expr) => {
+        if !$cond {
+            return false;
+        }
+    };
+}
+
 fn is_valid_candidate(
     table: &mut InferenceTable<'_>,
     name: Option<&Name>,
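Hoisted to module scope in the hunk above, `check_that!` is just an early-return guard for `bool`-returning functions, shared by the candidate checks that follow. A minimal sketch of the same pattern in isolation — the function names are illustrative, not from the codebase:

```rust
// Early-return guard: bail out with `false` as soon as a condition fails.
macro_rules! check_that {
    ($cond:expr) => {
        if !$cond {
            return false;
        }
    };
}

fn is_even_and_positive(n: i64) -> bool {
    check_that!(n > 0);
    check_that!(n % 2 == 0);
    true
}

fn main() {
    assert!(is_even_and_positive(4));
    assert!(!is_even_and_positive(-2));
    assert!(!is_even_and_positive(3));
}
```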
@@ -1072,54 +1080,10 @@ fn is_valid_candidate(
     self_ty: &Ty,
     visible_from_module: Option<ModuleId>,
 ) -> bool {
-    macro_rules! check_that {
-        ($cond:expr) => {
-            if !$cond {
-                return false;
-            }
-        };
-    }
-
     let db = table.db;
     match item {
         AssocItemId::FunctionId(m) => {
-            let data = db.function_data(m);
-
-            check_that!(name.map_or(true, |n| n == &data.name));
-            check_that!(visible_from_module.map_or(true, |from_module| {
-                let v = db.function_visibility(m).is_visible_from(db.upcast(), from_module);
-                if !v {
-                    cov_mark::hit!(autoderef_candidate_not_visible);
-                }
-                v
-            }));
-
-            table.run_in_snapshot(|table| {
-                let subst = TyBuilder::subst_for_def(db, m).fill_with_inference_vars(table).build();
-                let expect_self_ty = match m.lookup(db.upcast()).container {
-                    ItemContainerId::TraitId(_) => {
-                        subst.at(Interner, 0).assert_ty_ref(Interner).clone()
-                    }
-                    ItemContainerId::ImplId(impl_id) => {
-                        subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner)
-                    }
-                    // We should only get called for associated items (impl/trait)
-                    ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
-                        unreachable!()
-                    }
-                };
-                check_that!(table.unify(&expect_self_ty, self_ty));
-                if let Some(receiver_ty) = receiver_ty {
-                    check_that!(data.has_self_param());
-
-                    let sig = db.callable_item_signature(m.into());
-                    let expected_receiver =
-                        sig.map(|s| s.params()[0].clone()).substitute(Interner, &subst);
-
-                    check_that!(table.unify(&receiver_ty, &expected_receiver));
-                }
-                true
-            })
+            is_valid_fn_candidate(table, m, name, receiver_ty, self_ty, visible_from_module)
         }
         AssocItemId::ConstId(c) => {
             let data = db.const_data(c);
@@ -1152,6 +1116,94 @@ fn is_valid_candidate(
     }
 }
 
+fn is_valid_fn_candidate(
+    table: &mut InferenceTable<'_>,
+    fn_id: FunctionId,
+    name: Option<&Name>,
+    receiver_ty: Option<&Ty>,
+    self_ty: &Ty,
+    visible_from_module: Option<ModuleId>,
+) -> bool {
+    let db = table.db;
+    let data = db.function_data(fn_id);
+
+    check_that!(name.map_or(true, |n| n == &data.name));
+    check_that!(visible_from_module.map_or(true, |from_module| {
+        let v = db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module);
+        if !v {
+            cov_mark::hit!(autoderef_candidate_not_visible);
+        }
+        v
+    }));
+
+    table.run_in_snapshot(|table| {
+        let container = fn_id.lookup(db.upcast()).container;
+        let impl_subst = match container {
+            ItemContainerId::ImplId(it) => {
+                TyBuilder::subst_for_def(db, it).fill_with_inference_vars(table).build()
+            }
+            ItemContainerId::TraitId(it) => {
+                TyBuilder::subst_for_def(db, it).fill_with_inference_vars(table).build()
+            }
+            _ => unreachable!(),
+        };
+
+        let fn_subst = TyBuilder::subst_for_def(db, fn_id)
+            .use_parent_substs(&impl_subst)
+            .fill_with_inference_vars(table)
+            .build();
+
+        let expect_self_ty = match container {
+            ItemContainerId::TraitId(_) => fn_subst.at(Interner, 0).assert_ty_ref(Interner).clone(),
+            ItemContainerId::ImplId(impl_id) => {
+                fn_subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner)
+            }
+            // We should only get called for associated items (impl/trait)
+            ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
+                unreachable!()
+            }
+        };
+        check_that!(table.unify(&expect_self_ty, self_ty));
+
+        if let Some(receiver_ty) = receiver_ty {
+            check_that!(data.has_self_param());
+
+            let sig = db.callable_item_signature(fn_id.into());
+            let expected_receiver =
+                sig.map(|s| s.params()[0].clone()).substitute(Interner, &fn_subst);
+
+            check_that!(table.unify(&receiver_ty, &expected_receiver));
+        }
+
+        if let ItemContainerId::ImplId(impl_id) = container {
+            // We need to consider the bounds on the impl to distinguish functions of the same name
+            // for a type.
+            let predicates = db.generic_predicates(impl_id.into());
+            predicates
+                .iter()
+                .map(|predicate| {
+                    let (p, b) = predicate
+                        .clone()
+                        .substitute(Interner, &impl_subst)
+                        // Skipping the inner binders is ok, as we don't handle quantified where
+                        // clauses yet.
+                        .into_value_and_skipped_binders();
+                    stdx::always!(b.len(Interner) == 0);
+                    p
+                })
+                // It's ok to get ambiguity here, as we may not have enough information to prove
+                // obligations. We'll check if the user is calling the selected method properly
+                // later anyway.
+                .all(|p| table.try_obligation(p.cast(Interner)).is_some())
+        } else {
+            // For `ItemContainerId::TraitId`, we check if `self_ty` implements the trait in
+            // `iterate_trait_method_candidates()`.
+            // For others, this function shouldn't be called.
+            true
+        }
+    })
+}
+
 pub fn implements_trait(
     ty: &Canonical<Ty>,
     db: &dyn HirDatabase,
@@ -1790,3 +1790,46 @@ impl u16 {
 "#,
     )
 }
+
+#[test]
+fn with_impl_bounds() {
+    check_types(
+        r#"
+trait Trait {}
+struct Foo<T>(T);
+impl Trait for isize {}
+
+impl<T: Trait> Foo<T> {
+  fn foo() -> isize { 0 }
+  fn bar(&self) -> isize { 0 }
+}
+
+impl Foo<()> {
+  fn foo() {}
+  fn bar(&self) {}
+}
+
+fn f() {
+  let _ = Foo::<isize>::foo();
+        //^isize
+  let _ = Foo(0isize).bar();
+            //^isize
+  let _ = Foo::<()>::foo();
+        //^()
+  let _ = Foo(()).bar();
+        //^()
+  let _ = Foo::<usize>::foo();
+        //^{unknown}
+  let _ = Foo(0usize).bar();
+            //^{unknown}
+}
+
+fn g<T: Trait>(a: T) {
+  let _ = Foo::<T>::foo();
+        //^isize
+  let _ = Foo(a).bar();
+        //^isize
+}
+        "#,
+    );
+}
@@ -488,6 +488,42 @@ fn infer_adt_pattern() {
     );
 }
 
+#[test]
+fn tuple_struct_destructured_with_self() {
+    check_infer(
+        r#"
+struct Foo(usize,);
+impl Foo {
+    fn f() {
+        let Self(s,) = &Foo(0,);
+        let Self(s,) = &mut Foo(0,);
+        let Self(s,) = Foo(0,);
+    }
+}
+        "#,
+        expect![[r#"
+            42..151 '{ ... }': ()
+            56..64 'Self(s,)': Foo
+            61..62 's': &usize
+            67..75 '&Foo(0,)': &Foo
+            68..71 'Foo': Foo(usize) -> Foo
+            68..75 'Foo(0,)': Foo
+            72..73 '0': usize
+            89..97 'Self(s,)': Foo
+            94..95 's': &mut usize
+            100..112 '&mut Foo(0,)': &mut Foo
+            105..108 'Foo': Foo(usize) -> Foo
+            105..112 'Foo(0,)': Foo
+            109..110 '0': usize
+            126..134 'Self(s,)': Foo
+            131..132 's': usize
+            137..140 'Foo': Foo(usize) -> Foo
+            137..144 'Foo(0,)': Foo
+            141..142 '0': usize
+        "#]],
+    );
+}
+
 #[test]
 fn enum_variant_through_self_in_pattern() {
     check_infer(
@@ -1648,3 +1648,20 @@ fn main() {
         "#]],
     );
 }
+
+#[test]
+fn trailing_empty_macro() {
+    cov_mark::check!(empty_macro_in_trailing_position_is_removed);
+    check_no_mismatches(
+        r#"
+macro_rules! m2 {
+    ($($t:tt)*) => {$($t)*};
+}
+
+fn macrostmts() -> u8 {
+    m2! { 0 }
+    m2! {}
+}
+    "#,
+    );
+}
@@ -137,6 +137,31 @@ fn not_send() -> Box<dyn Future<Output = ()> + 'static> {
     );
 }
 
+#[test]
+fn into_future_trait() {
+    check_types(
+        r#"
+//- minicore: future
+struct Futurable;
+impl core::future::IntoFuture for Futurable {
+    type Output = u64;
+    type IntoFuture = IntFuture;
+}
+
+struct IntFuture;
+impl core::future::Future for IntFuture {
+    type Output = u64;
+}
+
+fn test() {
+    let r = Futurable;
+    let v = r.await;
+    v;
+} //^ u64
+"#,
+    );
+}
+
 #[test]
 fn infer_try() {
     check_types(
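The test above exercises the standard `IntoFuture` mechanism: `.await` first calls `IntoFuture::into_future` and only then polls the resulting future. A self-contained illustration against the real `std::future::IntoFuture` API (stable since Rust 1.64); the `Query` type and the tiny `block_on` executor are illustrative, not part of rust-analyzer:

```rust
use std::future::{ready, Future, IntoFuture, Ready};
use std::pin::pin;
use std::ptr;
use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker};

// `Query` is not itself a `Future`; `.await` on it goes through `IntoFuture`.
struct Query {
    id: u32,
}

impl IntoFuture for Query {
    type Output = u32;
    type IntoFuture = Ready<u32>;

    fn into_future(self) -> Self::IntoFuture {
        ready(self.id * 2)
    }
}

async fn run() -> u32 {
    Query { id: 21 }.await
}

// A tiny single-future executor; enough for futures that never return Pending.
fn block_on<F: Future>(fut: F) -> F::Output {
    unsafe fn no_op(_: *const ()) {}
    unsafe fn clone(_: *const ()) -> RawWaker {
        RawWaker::new(ptr::null(), &VTABLE)
    }
    static VTABLE: RawWakerVTable = RawWakerVTable::new(clone, no_op, no_op, no_op);

    let waker = unsafe { Waker::from_raw(RawWaker::new(ptr::null(), &VTABLE)) };
    let mut cx = Context::from_waker(&waker);
    let mut fut = pin!(fut);
    loop {
        if let Poll::Ready(value) = fut.as_mut().poll(&mut cx) {
            return value;
        }
    }
}

fn main() {
    assert_eq!(block_on(run()), 42);
}
```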
@@ -1476,6 +1501,34 @@ fn test(x: Trait, y: &Trait) -> u64 {
             165..172 'z.foo()': u64
         "#]],
     );
+
+    check_infer_with_mismatches(
+        r#"
+//- minicore: fn, coerce_unsized
+struct S;
+impl S {
+    fn foo(&self) {}
+}
+fn f(_: &Fn(S)) {}
+fn main() {
+    f(&|number| number.foo());
+}
+        "#,
+        expect![[r#"
+            31..35 'self': &S
+            37..39 '{}': ()
+            47..48 '_': &dyn Fn(S)
+            58..60 '{}': ()
+            71..105 '{ ...()); }': ()
+            77..78 'f': fn f(&dyn Fn(S))
+            77..102 'f(&|nu...foo())': ()
+            79..101 '&|numb....foo()': &|S| -> ()
+            80..101 '|numbe....foo()': |S| -> ()
+            81..87 'number': S
+            89..95 'number': S
+            89..101 'number.foo()': ()
+        "#]],
+    )
 }
 
 #[test]
@@ -72,7 +72,7 @@ use itertools::Itertools;
 use nameres::diagnostics::DefDiagnosticKind;
 use once_cell::unsync::Lazy;
 use rustc_hash::FxHashSet;
-use stdx::{format_to, impl_from, never};
+use stdx::{impl_from, never};
 use syntax::{
     ast::{self, HasAttrs as _, HasDocComments, HasName},
     AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T,
@@ -1136,6 +1136,20 @@ impl DefWithBody {
         }
     }
 
+    fn id(&self) -> DefWithBodyId {
+        match self {
+            DefWithBody::Function(it) => it.id.into(),
+            DefWithBody::Static(it) => it.id.into(),
+            DefWithBody::Const(it) => it.id.into(),
+        }
+    }
+
+    /// A textual representation of the HIR of this def's body for debugging purposes.
+    pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
+        let body = db.body(self.id());
+        body.pretty_print(db.upcast(), self.id())
+    }
+
     pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
         let krate = self.module(db).id.krate();
@@ -1470,19 +1484,6 @@ impl Function {
         let def_map = db.crate_def_map(loc.krate(db).into());
         def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
     }
-
-    /// A textual representation of the HIR of this function for debugging purposes.
-    pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
-        let body = db.body(self.id.into());
-
-        let mut result = String::new();
-        format_to!(result, "HIR expressions in the body of `{}`:\n", self.name(db));
-        for (id, expr) in body.exprs.iter() {
-            format_to!(result, "{:?}: {:?}\n", id, expr);
-        }
-
-        result
-    }
 }
 
 // Note: logically, this belongs to `hir_ty`, but we are not using it there yet.
@@ -2777,20 +2778,32 @@ impl Type {
         self.ty.is_unknown()
     }
 
-    /// Checks that particular type `ty` implements `std::future::Future`.
+    /// Checks that particular type `ty` implements `std::future::IntoFuture` or
+    /// `std::future::Future`.
     /// This function is used in `.await` syntax completion.
-    pub fn impls_future(&self, db: &dyn HirDatabase) -> bool {
-        let std_future_trait = db
-            .lang_item(self.env.krate, SmolStr::new_inline("future_trait"))
-            .and_then(|it| it.as_trait());
-        let std_future_trait = match std_future_trait {
+    pub fn impls_into_future(&self, db: &dyn HirDatabase) -> bool {
+        let trait_ = db
+            .lang_item(self.env.krate, SmolStr::new_inline("into_future"))
+            .and_then(|it| {
+                let into_future_fn = it.as_function()?;
+                let assoc_item = as_assoc_item(db, AssocItem::Function, into_future_fn)?;
+                let into_future_trait = assoc_item.containing_trait_or_trait_impl(db)?;
+                Some(into_future_trait.id)
+            })
+            .or_else(|| {
+                let future_trait =
+                    db.lang_item(self.env.krate, SmolStr::new_inline("future_trait"))?;
+                future_trait.as_trait()
+            });
+
+        let trait_ = match trait_ {
             Some(it) => it,
             None => return false,
         };
 
         let canonical_ty =
             Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
-        method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), std_future_trait)
+        method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), trait_)
     }
 
     /// Checks that particular type `ty` implements `std::ops::FnOnce`.
@@ -27,6 +27,7 @@ use hir_def::{
 use hir_expand::{
     builtin_fn_macro::BuiltinFnLikeExpander,
     hygiene::Hygiene,
+    mod_path::path,
     name,
     name::{AsName, Name},
     HirFileId, InFile,
@@ -269,14 +270,35 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         await_expr: &ast::AwaitExpr,
     ) -> Option<FunctionId> {
-        let ty = self.ty_of_expr(db, &await_expr.expr()?.into())?;
+        let mut ty = self.ty_of_expr(db, &await_expr.expr()?.into())?.clone();
 
-        let op_fn = db
+        let into_future_trait = self
+            .resolver
+            .resolve_known_trait(db.upcast(), &path![core::future::IntoFuture])
+            .map(Trait::from);
+
+        if let Some(into_future_trait) = into_future_trait {
+            let type_ = Type::new_with_resolver(db, &self.resolver, ty.clone());
+            if type_.impls_trait(db, into_future_trait, &[]) {
+                let items = into_future_trait.items(db);
+                let into_future_type = items.into_iter().find_map(|item| match item {
+                    AssocItem::TypeAlias(alias)
+                        if alias.name(db) == hir_expand::name![IntoFuture] =>
+                    {
+                        Some(alias)
+                    }
+                    _ => None,
+                })?;
+                let future_trait = type_.normalize_trait_assoc_type(db, &[], into_future_type)?;
+                ty = future_trait.ty;
+            }
+        }
+
+        let poll_fn = db
             .lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())?
             .as_function()?;
-        let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build();
-        Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
+        let substs = hir_ty::TyBuilder::subst_for_def(db, poll_fn).push(ty.clone()).build();
+        Some(self.resolve_impl_method_or_trait_def(db, poll_fn, &substs))
     }
 
     pub(crate) fn resolve_prefix_expr(
@@ -29,7 +29,7 @@ use super::remove_unused_param::range_to_remove;
 
 // Assist: extract_module
 //
-// Extracts a selected region as seperate module. All the references, visibility and imports are
+// Extracts a selected region as separate module. All the references, visibility and imports are
 // resolved.
 //
 // ```
@@ -105,7 +105,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
     //
     //- Thirdly, resolving all the imports this includes removing paths from imports
     // outside the module, shifting/cloning them inside new module, or shifting the imports, or making
-    // new import statemnts
+    // new import statements
 
     //We are getting item usages and record_fields together, record_fields
     //for change_visibility and usages for first point mentioned above in the process
@@ -661,7 +661,7 @@ fn check_intersection_and_push(
     import_path: TextRange,
 ) {
     if import_paths_to_be_removed.len() > 0 {
-        // Text ranges recieved here for imports are extended to the
+        // Text ranges received here for imports are extended to the
         // next/previous comma which can cause intersections among them
         // and later deletion of these can cause panics similar
         // to reported in #11766. So to mitigate it, we
@@ -61,43 +61,8 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     }
 
     let fn_name = &*name_ref.text();
-    let target_module;
-    let mut adt_name = None;
-
-    let (target, file, insert_offset) = match path.qualifier() {
-        Some(qualifier) => match ctx.sema.resolve_path(&qualifier) {
-            Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => {
-                target_module = Some(module);
-                get_fn_target(ctx, &target_module, call.clone())?
-            }
-            Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => {
-                if let hir::Adt::Enum(_) = adt {
-                    // Don't suggest generating function if the name starts with an uppercase letter
-                    if name_ref.text().starts_with(char::is_uppercase) {
-                        return None;
-                    }
-                }
-
-                let current_module = ctx.sema.scope(call.syntax())?.module();
-                let module = adt.module(ctx.sema.db);
-                target_module = if current_module == module { None } else { Some(module) };
-                if current_module.krate() != module.krate() {
-                    return None;
-                }
-                let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?;
-                let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?;
-                adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
-                (target, file, insert_offset)
-            }
-            _ => {
-                return None;
-            }
-        },
-        _ => {
-            target_module = None;
-            get_fn_target(ctx, &target_module, call.clone())?
-        }
-    };
+    let TargetInfo { target_module, adt_name, target, file, insert_offset } =
+        fn_target_info(ctx, path, &call, fn_name)?;
     let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?;
     let text_range = call.syntax().text_range();
     let label = format!("Generate {} function", function_builder.fn_name);
@@ -113,6 +78,57 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     )
 }
 
+struct TargetInfo {
+    target_module: Option<Module>,
+    adt_name: Option<hir::Name>,
+    target: GeneratedFunctionTarget,
+    file: FileId,
+    insert_offset: TextSize,
+}
+
+impl TargetInfo {
+    fn new(
+        target_module: Option<Module>,
+        adt_name: Option<hir::Name>,
+        target: GeneratedFunctionTarget,
+        file: FileId,
+        insert_offset: TextSize,
+    ) -> Self {
+        Self { target_module, adt_name, target, file, insert_offset }
+    }
+}
+
+fn fn_target_info(
+    ctx: &AssistContext<'_>,
+    path: ast::Path,
+    call: &CallExpr,
+    fn_name: &str,
+) -> Option<TargetInfo> {
+    match path.qualifier() {
+        Some(qualifier) => match ctx.sema.resolve_path(&qualifier) {
+            Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => {
+                get_fn_target_info(ctx, &Some(module), call.clone())
+            }
+            Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => {
+                if let hir::Adt::Enum(_) = adt {
+                    // Don't suggest generating function if the name starts with an uppercase letter
+                    if fn_name.starts_with(char::is_uppercase) {
+                        return None;
+                    }
+                }
+
+                assoc_fn_target_info(ctx, call, adt, fn_name)
+            }
+            Some(hir::PathResolution::SelfType(impl_)) => {
+                let adt = impl_.self_ty(ctx.db()).as_adt()?;
+                assoc_fn_target_info(ctx, call, adt, fn_name)
+            }
+            _ => None,
+        },
+        _ => get_fn_target_info(ctx, &None, call.clone()),
+    }
+}
+
 fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
     if ctx.sema.resolve_method_call(&call).is_some() {
@@ -366,6 +382,15 @@ fn make_return_type(
     (ret_type, should_focus_return_type)
 }
 
+fn get_fn_target_info(
+    ctx: &AssistContext<'_>,
+    target_module: &Option<Module>,
+    call: CallExpr,
+) -> Option<TargetInfo> {
+    let (target, file, insert_offset) = get_fn_target(ctx, target_module, call)?;
+    Some(TargetInfo::new(*target_module, None, target, file, insert_offset))
+}
+
 fn get_fn_target(
     ctx: &AssistContext<'_>,
     target_module: &Option<Module>,
@@ -399,6 +424,24 @@ fn get_method_target(
     Some((target.clone(), get_insert_offset(&target)))
 }
 
+fn assoc_fn_target_info(
+    ctx: &AssistContext<'_>,
+    call: &CallExpr,
+    adt: hir::Adt,
+    fn_name: &str,
+) -> Option<TargetInfo> {
+    let current_module = ctx.sema.scope(call.syntax())?.module();
+    let module = adt.module(ctx.sema.db);
+    let target_module = if current_module == module { None } else { Some(module) };
+    if current_module.krate() != module.krate() {
+        return None;
+    }
+    let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?;
+    let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?;
+    let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
+    Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset))
+}
+
 fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
     match &target {
         GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
@@ -1633,6 +1676,33 @@ fn bar() ${0:-> _} {
         )
     }
 
+    #[test]
+    fn create_static_method_within_an_impl_with_self_syntax() {
+        check_assist(
+            generate_function,
+            r"
+struct S;
+impl S {
+    fn foo(&self) {
+        Self::bar$0();
+    }
+}
+",
+            r"
+struct S;
+impl S {
+    fn foo(&self) {
+        Self::bar();
+    }
+
+    fn bar() ${0:-> _} {
+        todo!()
+    }
+}
+",
+        )
+    }
+
     #[test]
     fn no_panic_on_invalid_global_path() {
         check_assist(
@@ -13,7 +13,7 @@ use ide_db::{
 use itertools::{izip, Itertools};
 use syntax::{
     ast::{self, edit_in_place::Indent, HasArgList, PathExpr},
-    ted, AstNode,
+    ted, AstNode, NodeOrToken, SyntaxKind,
 };
 
 use crate::{
@ -311,6 +311,13 @@ fn inline(
|
||||||
} else {
|
} else {
|
||||||
fn_body.clone_for_update()
|
fn_body.clone_for_update()
|
||||||
};
|
};
|
||||||
|
if let Some(t) = body.syntax().ancestors().find_map(ast::Impl::cast).and_then(|i| i.self_ty()) {
|
||||||
|
body.syntax()
|
||||||
|
.descendants_with_tokens()
|
||||||
|
.filter_map(NodeOrToken::into_token)
|
||||||
|
.filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
|
||||||
|
.for_each(|tok| ted::replace(tok, t.syntax()));
|
||||||
|
}
|
||||||
let usages_for_locals = |local| {
|
let usages_for_locals = |local| {
|
||||||
Definition::Local(local)
|
Definition::Local(local)
|
||||||
.usages(sema)
|
.usages(sema)
|
||||||
|
@ -345,6 +352,7 @@ fn inline(
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
if function.self_param(sema.db).is_some() {
|
if function.self_param(sema.db).is_some() {
|
||||||
let this = || make::name_ref("this").syntax().clone_for_update();
|
let this = || make::name_ref("this").syntax().clone_for_update();
|
||||||
if let Some(self_local) = params[0].2.as_local(sema.db) {
|
if let Some(self_local) = params[0].2.as_local(sema.db) {
|
||||||
|
@ -1188,6 +1196,31 @@ fn bar() -> u32 {
|
||||||
x
|
x
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn inline_call_with_self_type() {
|
||||||
|
check_assist(
|
||||||
|
inline_call,
|
||||||
|
r#"
|
||||||
|
struct A(u32);
|
||||||
|
impl A {
|
||||||
|
fn f() -> Self { Self(114514) }
|
||||||
|
}
|
||||||
|
fn main() {
|
||||||
|
A::f$0();
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
r#"
|
||||||
|
struct A(u32);
|
||||||
|
impl A {
|
||||||
|
fn f() -> Self { Self(114514) }
|
||||||
|
}
|
||||||
|
fn main() {
|
||||||
|
A(114514);
|
||||||
|
}
|
||||||
"#,
|
"#,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
@@ -1,9 +1,9 @@
// Some ideas for future improvements:
// - Support replacing aliases which are used in expressions, e.g. `A::new()`.
// - "inline_alias_to_users" assist #10881.
// - Remove unused aliases if there are no longer any users, see inline_call.rs.

use hir::{HasSource, PathResolution};
use ide_db::{defs::Definition, search::FileReference};
use itertools::Itertools;
use std::collections::HashMap;
use syntax::{

@@ -16,6 +16,78 @@ use crate::{
AssistId, AssistKind,
};

// Assist: inline_type_alias_uses
//
// Inline a type alias into all of its uses where possible.
//
// ```
// type $0A = i32;
// fn id(x: A) -> A {
// x
// };
// fn foo() {
// let _: A = 3;
// }
// ```
// ->
// ```
// type A = i32;
// fn id(x: i32) -> i32 {
// x
// };
// fn foo() {
// let _: i32 = 3;
// }
pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let name = ctx.find_node_at_offset::<ast::Name>()?;
let ast_alias = name.syntax().parent().and_then(ast::TypeAlias::cast)?;

let hir_alias = ctx.sema.to_def(&ast_alias)?;
let concrete_type = ast_alias.ty()?;

let usages = Definition::TypeAlias(hir_alias).usages(&ctx.sema);
if !usages.at_least_one() {
return None;
}

// until this is ok

acc.add(
AssistId("inline_type_alias_uses", AssistKind::RefactorInline),
"Inline type alias into all uses",
name.syntax().text_range(),
|builder| {
let usages = usages.all();

let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| {
builder.edit_file(file_id);

let path_types: Vec<ast::PathType> = refs
.into_iter()
.filter_map(|file_ref| match file_ref.name {
ast::NameLike::NameRef(path_type) => {
path_type.syntax().ancestors().nth(3).and_then(ast::PathType::cast)
}
_ => None,
})
.collect();

for (target, replacement) in path_types.into_iter().filter_map(|path_type| {
let replacement = inline(&ast_alias, &path_type)?.to_text(&concrete_type);
let target = path_type.syntax().text_range();
Some((target, replacement))
}) {
builder.replace(target, replacement);
}
};

for (file_id, refs) in usages.into_iter() {
inline_refs_for_file(file_id, refs);
}
},
)
}

// Assist: inline_type_alias
//
// Replace a type alias with its concrete type.

@@ -36,11 +108,6 @@ use crate::{
// }
// ```
pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
enum Replacement {
Generic { lifetime_map: LifetimeMap, const_and_type_map: ConstAndTypeMap },
Plain,
}

let alias_instance = ctx.find_node_at_offset::<ast::PathType>()?;
let concrete_type;
let replacement;

@@ -59,23 +126,7 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
_ => {
let alias = get_type_alias(&ctx, &alias_instance)?;
concrete_type = alias.ty()?;
replacement = inline(&alias, &alias_instance)?;
replacement = if let Some(alias_generics) = alias.generic_param_list() {
if alias_generics.generic_params().next().is_none() {
cov_mark::hit!(no_generics_params);
return None;
}

let instance_args =
alias_instance.syntax().descendants().find_map(ast::GenericArgList::cast);

Replacement::Generic {
lifetime_map: LifetimeMap::new(&instance_args, &alias_generics)?,
const_and_type_map: ConstAndTypeMap::new(&instance_args, &alias_generics)?,
}
} else {
Replacement::Plain
};
}
}

@@ -85,19 +136,45 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
AssistId("inline_type_alias", AssistKind::RefactorInline),
"Inline type alias",
target,
|builder| {
|builder| builder.replace(target, replacement.to_text(&concrete_type)),
let replacement_text = match replacement {
Replacement::Generic { lifetime_map, const_and_type_map } => {
create_replacement(&lifetime_map, &const_and_type_map, &concrete_type)
}
Replacement::Plain => concrete_type.to_string(),
};

builder.replace(target, replacement_text);
},
)
}

impl Replacement {
fn to_text(&self, concrete_type: &ast::Type) -> String {
match self {
Replacement::Generic { lifetime_map, const_and_type_map } => {
create_replacement(&lifetime_map, &const_and_type_map, &concrete_type)
}
Replacement::Plain => concrete_type.to_string(),
}
}
}

enum Replacement {
Generic { lifetime_map: LifetimeMap, const_and_type_map: ConstAndTypeMap },
Plain,
}

fn inline(alias_def: &ast::TypeAlias, alias_instance: &ast::PathType) -> Option<Replacement> {
let repl = if let Some(alias_generics) = alias_def.generic_param_list() {
if alias_generics.generic_params().next().is_none() {
cov_mark::hit!(no_generics_params);
return None;
}
let instance_args =
alias_instance.syntax().descendants().find_map(ast::GenericArgList::cast);

Replacement::Generic {
lifetime_map: LifetimeMap::new(&instance_args, &alias_generics)?,
const_and_type_map: ConstAndTypeMap::new(&instance_args, &alias_generics)?,
}
} else {
Replacement::Plain
};
Some(repl)
}

struct LifetimeMap(HashMap<String, ast::Lifetime>);

impl LifetimeMap {
@ -835,4 +912,90 @@ trait Tr {
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
mod inline_type_alias_uses {
|
||||||
|
use crate::{handlers::inline_type_alias::inline_type_alias_uses, tests::check_assist};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn inline_uses() {
|
||||||
|
check_assist(
|
||||||
|
inline_type_alias_uses,
|
||||||
|
r#"
|
||||||
|
type $0A = u32;
|
||||||
|
|
||||||
|
fn foo() {
|
||||||
|
let _: A = 3;
|
||||||
|
let _: A = 4;
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
r#"
|
||||||
|
type A = u32;
|
||||||
|
|
||||||
|
fn foo() {
|
||||||
|
let _: u32 = 3;
|
||||||
|
let _: u32 = 4;
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn inline_uses_across_files() {
|
||||||
|
check_assist(
|
||||||
|
inline_type_alias_uses,
|
||||||
|
r#"
|
||||||
|
//- /lib.rs
|
||||||
|
mod foo;
|
||||||
|
type $0T<E> = Vec<E>;
|
||||||
|
fn f() -> T<&str> {
|
||||||
|
vec!["hello"]
|
||||||
|
}
|
||||||
|
|
||||||
|
//- /foo.rs
|
||||||
|
use super::T;
|
||||||
|
fn foo() {
|
||||||
|
let _: T<i8> = Vec::new();
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
r#"
|
||||||
|
//- /lib.rs
|
||||||
|
mod foo;
|
||||||
|
type T<E> = Vec<E>;
|
||||||
|
fn f() -> Vec<&str> {
|
||||||
|
vec!["hello"]
|
||||||
|
}
|
||||||
|
|
||||||
|
//- /foo.rs
|
||||||
|
use super::T;
|
||||||
|
fn foo() {
|
||||||
|
let _: Vec<i8> = Vec::new();
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn inline_uses_across_files_2() {
|
||||||
|
check_assist(
|
||||||
|
inline_type_alias_uses,
|
||||||
|
r#"
|
||||||
|
//- /lib.rs
|
||||||
|
mod foo;
|
||||||
|
type $0I = i32;
|
||||||
|
|
||||||
|
//- /foo.rs
|
||||||
|
use super::I;
|
||||||
|
fn foo() {
|
||||||
|
let _: I = 0;
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
r#"
|
||||||
|
use super::I;
|
||||||
|
fn foo() {
|
||||||
|
let _: i32 = 0;
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -88,7 +88,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
},
);
} else if let Some(InferType(t)) = let_stmt.ty() {
// If there's a type inferrence underscore, we can offer to replace it with the type in
// If there's a type inference underscore, we can offer to replace it with the type in
// the turbofish.
// let x: _ = fn::<...>();
let underscore_range = t.syntax().text_range();

@@ -243,6 +243,7 @@ mod handlers {
inline_call::inline_into_callers,
inline_local_variable::inline_local_variable,
inline_type_alias::inline_type_alias,
inline_type_alias::inline_type_alias_uses,
introduce_named_generic::introduce_named_generic,
introduce_named_lifetime::introduce_named_lifetime,
invert_if::invert_if,

@ -1356,6 +1356,31 @@ fn main() {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn doctest_inline_type_alias_uses() {
|
||||||
|
check_doc_test(
|
||||||
|
"inline_type_alias_uses",
|
||||||
|
r#####"
|
||||||
|
type $0A = i32;
|
||||||
|
fn id(x: A) -> A {
|
||||||
|
x
|
||||||
|
};
|
||||||
|
fn foo() {
|
||||||
|
let _: A = 3;
|
||||||
|
}
|
||||||
|
"#####,
|
||||||
|
r#####"
|
||||||
|
type A = i32;
|
||||||
|
fn id(x: i32) -> i32 {
|
||||||
|
x
|
||||||
|
};
|
||||||
|
fn foo() {
|
||||||
|
let _: i32 = 3;
|
||||||
|
}
|
||||||
|
"#####,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn doctest_introduce_named_generic() {
|
fn doctest_introduce_named_generic() {
|
||||||
check_doc_test(
|
check_doc_test(
|
||||||
|
|
|
@@ -55,6 +55,7 @@ const USELESS_METHODS: &[&str] = &[
"iter",
"into_iter",
"iter_mut",
"into_future",
];

pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {

@@ -75,7 +76,7 @@ pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {
/// In current implementation, the function tries to get the name from
/// the following sources:
///
/// * if expr is an argument to function/method, use paramter name
/// * if expr is an argument to function/method, use parameter name
/// * if expr is a function/method call, use function name
/// * expression type name if it exists (E.g. `()`, `fn() -> ()` or `!` do not have names)
/// * fallback: `var_name`

@@ -85,7 +86,7 @@ pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {
/// Currently it sticks to the first name found.
// FIXME: Microoptimize and return a `SmolStr` here.
pub(crate) fn for_variable(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> String {
// `from_param` does not benifit from stripping
// `from_param` does not benefit from stripping
// it need the largest context possible
// so we check firstmost
if let Some(name) = from_param(expr, sema) {

@ -19,7 +19,7 @@ pub(crate) fn complete_dot(
|
||||||
};
|
};
|
||||||
|
|
||||||
// Suggest .await syntax for types that implement Future trait
|
// Suggest .await syntax for types that implement Future trait
|
||||||
if receiver_ty.impls_future(ctx.db) {
|
if receiver_ty.impls_into_future(ctx.db) {
|
||||||
let mut item =
|
let mut item =
|
||||||
CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), "await");
|
CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), "await");
|
||||||
item.detail("expr.await");
|
item.detail("expr.await");
|
||||||
|
|
|
@ -75,16 +75,17 @@ impl Future for A {}
|
||||||
fn foo(a: A) { a.$0 }
|
fn foo(a: A) { a.$0 }
|
||||||
"#,
|
"#,
|
||||||
expect![[r#"
|
expect![[r#"
|
||||||
kw await expr.await
|
kw await expr.await
|
||||||
sn box Box::new(expr)
|
me into_future() (as IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture
|
||||||
sn call function(expr)
|
sn box Box::new(expr)
|
||||||
sn dbg dbg!(expr)
|
sn call function(expr)
|
||||||
sn dbgr dbg!(&expr)
|
sn dbg dbg!(expr)
|
||||||
sn let let
|
sn dbgr dbg!(&expr)
|
||||||
sn letm let mut
|
sn let let
|
||||||
sn match match expr {}
|
sn letm let mut
|
||||||
sn ref &expr
|
sn match match expr {}
|
||||||
sn refm &mut expr
|
sn ref &expr
|
||||||
|
sn refm &mut expr
|
||||||
"#]],
|
"#]],
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -98,18 +99,45 @@ fn foo() {
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
expect![[r#"
|
expect![[r#"
|
||||||
kw await expr.await
|
kw await expr.await
|
||||||
sn box Box::new(expr)
|
me into_future() (use core::future::IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture
|
||||||
sn call function(expr)
|
sn box Box::new(expr)
|
||||||
sn dbg dbg!(expr)
|
sn call function(expr)
|
||||||
sn dbgr dbg!(&expr)
|
sn dbg dbg!(expr)
|
||||||
sn let let
|
sn dbgr dbg!(&expr)
|
||||||
sn letm let mut
|
sn let let
|
||||||
sn match match expr {}
|
sn letm let mut
|
||||||
sn ref &expr
|
sn match match expr {}
|
||||||
sn refm &mut expr
|
sn ref &expr
|
||||||
|
sn refm &mut expr
|
||||||
"#]],
|
"#]],
|
||||||
)
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_completion_await_impls_into_future() {
|
||||||
|
check(
|
||||||
|
r#"
|
||||||
|
//- minicore: future
|
||||||
|
use core::future::*;
|
||||||
|
struct A {}
|
||||||
|
impl IntoFuture for A {}
|
||||||
|
fn foo(a: A) { a.$0 }
|
||||||
|
"#,
|
||||||
|
expect![[r#"
|
||||||
|
kw await expr.await
|
||||||
|
me into_future() (as IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture
|
||||||
|
sn box Box::new(expr)
|
||||||
|
sn call function(expr)
|
||||||
|
sn dbg dbg!(expr)
|
||||||
|
sn dbgr dbg!(&expr)
|
||||||
|
sn let let
|
||||||
|
sn letm let mut
|
||||||
|
sn match match expr {}
|
||||||
|
sn ref &expr
|
||||||
|
sn refm &mut expr
|
||||||
|
"#]],
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@@ -173,7 +173,7 @@ impl FormatStrParser {
}
}
(State::Expr, ':') if chars.peek().copied() == Some(':') => {
// path seperator
// path separator
current_expr.push_str("::");
chars.next();
}

@@ -185,7 +185,7 @@ impl FormatStrParser {
current_expr = String::new();
self.state = State::FormatOpts;
} else {
// We're inside of braced expression, assume that it's a struct field name/value delimeter.
// We're inside of braced expression, assume that it's a struct field name/value delimiter.
current_expr.push(chr);
}
}

@ -129,7 +129,7 @@ mod tests {
|
||||||
#[test]
|
#[test]
|
||||||
fn literal_struct_completion_edit() {
|
fn literal_struct_completion_edit() {
|
||||||
check_edit(
|
check_edit(
|
||||||
"FooDesc {…}",
|
"FooDesc{}",
|
||||||
r#"
|
r#"
|
||||||
struct FooDesc { pub bar: bool }
|
struct FooDesc { pub bar: bool }
|
||||||
|
|
||||||
|
@ -154,7 +154,7 @@ fn baz() {
|
||||||
#[test]
|
#[test]
|
||||||
fn literal_struct_impl_self_completion() {
|
fn literal_struct_impl_self_completion() {
|
||||||
check_edit(
|
check_edit(
|
||||||
"Self {…}",
|
"Self{}",
|
||||||
r#"
|
r#"
|
||||||
struct Foo {
|
struct Foo {
|
||||||
bar: u64,
|
bar: u64,
|
||||||
|
@ -180,7 +180,7 @@ impl Foo {
|
||||||
);
|
);
|
||||||
|
|
||||||
check_edit(
|
check_edit(
|
||||||
"Self(…)",
|
"Self()",
|
||||||
r#"
|
r#"
|
||||||
mod submod {
|
mod submod {
|
||||||
pub struct Foo(pub u64);
|
pub struct Foo(pub u64);
|
||||||
|
@ -209,7 +209,7 @@ impl submod::Foo {
|
||||||
#[test]
|
#[test]
|
||||||
fn literal_struct_completion_from_sub_modules() {
|
fn literal_struct_completion_from_sub_modules() {
|
||||||
check_edit(
|
check_edit(
|
||||||
"submod::Struct {…}",
|
"submod::Struct{}",
|
||||||
r#"
|
r#"
|
||||||
mod submod {
|
mod submod {
|
||||||
pub struct Struct {
|
pub struct Struct {
|
||||||
|
@ -238,7 +238,7 @@ fn f() -> submod::Struct {
|
||||||
#[test]
|
#[test]
|
||||||
fn literal_struct_complexion_module() {
|
fn literal_struct_complexion_module() {
|
||||||
check_edit(
|
check_edit(
|
||||||
"FooDesc {…}",
|
"FooDesc{}",
|
||||||
r#"
|
r#"
|
||||||
mod _69latrick {
|
mod _69latrick {
|
||||||
pub struct FooDesc { pub six: bool, pub neuf: Vec<String>, pub bar: bool }
|
pub struct FooDesc { pub six: bool, pub neuf: Vec<String>, pub bar: bool }
|
||||||
|
|
|
@ -565,6 +565,7 @@ fn main() { Foo::Fo$0 }
|
||||||
kind: SymbolKind(
|
kind: SymbolKind(
|
||||||
Variant,
|
Variant,
|
||||||
),
|
),
|
||||||
|
lookup: "Foo{}",
|
||||||
detail: "Foo { x: i32, y: i32 }",
|
detail: "Foo { x: i32, y: i32 }",
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
|
@ -591,6 +592,7 @@ fn main() { Foo::Fo$0 }
|
||||||
kind: SymbolKind(
|
kind: SymbolKind(
|
||||||
Variant,
|
Variant,
|
||||||
),
|
),
|
||||||
|
lookup: "Foo()",
|
||||||
detail: "Foo(i32, i32)",
|
detail: "Foo(i32, i32)",
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
|
@ -707,7 +709,7 @@ fn main() { let _: m::Spam = S$0 }
|
||||||
kind: SymbolKind(
|
kind: SymbolKind(
|
||||||
Variant,
|
Variant,
|
||||||
),
|
),
|
||||||
lookup: "Spam::Bar(…)",
|
lookup: "Spam::Bar()",
|
||||||
detail: "m::Spam::Bar(i32)",
|
detail: "m::Spam::Bar(i32)",
|
||||||
relevance: CompletionRelevance {
|
relevance: CompletionRelevance {
|
||||||
exact_name_match: false,
|
exact_name_match: false,
|
||||||
|
|
|
@ -10,8 +10,8 @@ use crate::{
|
||||||
render::{
|
render::{
|
||||||
compute_ref_match, compute_type_match,
|
compute_ref_match, compute_type_match,
|
||||||
variant::{
|
variant::{
|
||||||
format_literal_label, render_record_lit, render_tuple_lit, visible_fields,
|
format_literal_label, format_literal_lookup, render_record_lit, render_tuple_lit,
|
||||||
RenderedLiteral,
|
visible_fields, RenderedLiteral,
|
||||||
},
|
},
|
||||||
RenderContext,
|
RenderContext,
|
||||||
},
|
},
|
||||||
|
@ -97,13 +97,20 @@ fn render(
|
||||||
if !should_add_parens {
|
if !should_add_parens {
|
||||||
kind = StructKind::Unit;
|
kind = StructKind::Unit;
|
||||||
}
|
}
|
||||||
|
let label = format_literal_label(&qualified_name, kind);
|
||||||
|
let lookup = if qualified {
|
||||||
|
format_literal_lookup(&short_qualified_name.to_string(), kind)
|
||||||
|
} else {
|
||||||
|
format_literal_lookup(&qualified_name, kind)
|
||||||
|
};
|
||||||
|
|
||||||
let mut item = CompletionItem::new(
|
let mut item = CompletionItem::new(
|
||||||
CompletionItemKind::SymbolKind(thing.symbol_kind()),
|
CompletionItemKind::SymbolKind(thing.symbol_kind()),
|
||||||
ctx.source_range(),
|
ctx.source_range(),
|
||||||
format_literal_label(&qualified_name, kind),
|
label,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
item.lookup_by(lookup);
|
||||||
item.detail(rendered.detail);
|
item.detail(rendered.detail);
|
||||||
|
|
||||||
match snippet_cap {
|
match snippet_cap {
|
||||||
|
@ -111,9 +118,6 @@ fn render(
|
||||||
None => item.insert_text(rendered.literal),
|
None => item.insert_text(rendered.literal),
|
||||||
};
|
};
|
||||||
|
|
||||||
if qualified {
|
|
||||||
item.lookup_by(format_literal_label(&short_qualified_name.to_string(), kind));
|
|
||||||
}
|
|
||||||
item.set_documentation(thing.docs(db)).set_deprecated(thing.is_deprecated(&ctx));
|
item.set_documentation(thing.docs(db)).set_deprecated(thing.is_deprecated(&ctx));
|
||||||
|
|
||||||
let ty = thing.ty(db);
|
let ty = thing.ty(db);
|
||||||
|
|
|
@ -8,7 +8,7 @@ use syntax::SmolStr;
|
||||||
use crate::{
|
use crate::{
|
||||||
context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext},
|
context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext},
|
||||||
render::{
|
render::{
|
||||||
variant::{format_literal_label, visible_fields},
|
variant::{format_literal_label, format_literal_lookup, visible_fields},
|
||||||
RenderContext,
|
RenderContext,
|
||||||
},
|
},
|
||||||
CompletionItem, CompletionItemKind,
|
CompletionItem, CompletionItemKind,
|
||||||
|
@ -34,9 +34,10 @@ pub(crate) fn render_struct_pat(
|
||||||
let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str());
|
let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str());
|
||||||
let kind = strukt.kind(ctx.db());
|
let kind = strukt.kind(ctx.db());
|
||||||
let label = format_literal_label(name.as_str(), kind);
|
let label = format_literal_label(name.as_str(), kind);
|
||||||
|
let lookup = format_literal_lookup(name.as_str(), kind);
|
||||||
let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?;
|
let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?;
|
||||||
|
|
||||||
Some(build_completion(ctx, label, pat, strukt))
|
Some(build_completion(ctx, label, lookup, pat, strukt))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn render_variant_pat(
|
pub(crate) fn render_variant_pat(
|
||||||
|
@ -60,11 +61,14 @@ pub(crate) fn render_variant_pat(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let (label, pat) = match path_ctx {
|
let (label, lookup, pat) = match path_ctx {
|
||||||
Some(PathCompletionCtx { has_call_parens: true, .. }) => (name, escaped_name.to_string()),
|
Some(PathCompletionCtx { has_call_parens: true, .. }) => {
|
||||||
|
(name.clone(), name, escaped_name.to_string())
|
||||||
|
}
|
||||||
_ => {
|
_ => {
|
||||||
let kind = variant.kind(ctx.db());
|
let kind = variant.kind(ctx.db());
|
||||||
let label = format_literal_label(name.as_str(), kind);
|
let label = format_literal_label(name.as_str(), kind);
|
||||||
|
let lookup = format_literal_lookup(name.as_str(), kind);
|
||||||
let pat = render_pat(
|
let pat = render_pat(
|
||||||
&ctx,
|
&ctx,
|
||||||
pattern_ctx,
|
pattern_ctx,
|
||||||
|
@ -73,16 +77,17 @@ pub(crate) fn render_variant_pat(
|
||||||
&visible_fields,
|
&visible_fields,
|
||||||
fields_omitted,
|
fields_omitted,
|
||||||
)?;
|
)?;
|
||||||
(label, pat)
|
(label, lookup, pat)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
Some(build_completion(ctx, label, pat, variant))
|
Some(build_completion(ctx, label, lookup, pat, variant))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_completion(
|
fn build_completion(
|
||||||
ctx: RenderContext<'_>,
|
ctx: RenderContext<'_>,
|
||||||
label: SmolStr,
|
label: SmolStr,
|
||||||
|
lookup: SmolStr,
|
||||||
pat: String,
|
pat: String,
|
||||||
def: impl HasAttrs + Copy,
|
def: impl HasAttrs + Copy,
|
||||||
) -> CompletionItem {
|
) -> CompletionItem {
|
||||||
|
@ -90,6 +95,7 @@ fn build_completion(
|
||||||
item.set_documentation(ctx.docs(def))
|
item.set_documentation(ctx.docs(def))
|
||||||
.set_deprecated(ctx.is_deprecated(def))
|
.set_deprecated(ctx.is_deprecated(def))
|
||||||
.detail(&pat)
|
.detail(&pat)
|
||||||
|
.lookup_by(lookup)
|
||||||
.set_relevance(ctx.completion_relevance());
|
.set_relevance(ctx.completion_relevance());
|
||||||
match ctx.snippet_cap() {
|
match ctx.snippet_cap() {
|
||||||
Some(snippet_cap) => item.insert_snippet(snippet_cap, pat),
|
Some(snippet_cap) => item.insert_snippet(snippet_cap, pat),
|
||||||
|
|
|
@ -6,7 +6,7 @@ use itertools::Itertools;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
render::{
|
render::{
|
||||||
variant::{format_literal_label, visible_fields},
|
variant::{format_literal_label, format_literal_lookup, visible_fields},
|
||||||
RenderContext,
|
RenderContext,
|
||||||
},
|
},
|
||||||
CompletionItem, CompletionItemKind,
|
CompletionItem, CompletionItemKind,
|
||||||
|
@ -24,13 +24,16 @@ pub(crate) fn render_union_literal(
|
||||||
Some(p) => (p.unescaped().to_string(), p.to_string()),
|
Some(p) => (p.unescaped().to_string(), p.to_string()),
|
||||||
None => (name.unescaped().to_string(), name.to_string()),
|
None => (name.unescaped().to_string(), name.to_string()),
|
||||||
};
|
};
|
||||||
|
let label = format_literal_label(&name.to_smol_str(), StructKind::Record);
|
||||||
|
let lookup = format_literal_lookup(&name.to_smol_str(), StructKind::Record);
|
||||||
let mut item = CompletionItem::new(
|
let mut item = CompletionItem::new(
|
||||||
CompletionItemKind::SymbolKind(SymbolKind::Union),
|
CompletionItemKind::SymbolKind(SymbolKind::Union),
|
||||||
ctx.source_range(),
|
ctx.source_range(),
|
||||||
format_literal_label(&name.to_smol_str(), StructKind::Record),
|
label,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
item.lookup_by(lookup);
|
||||||
|
|
||||||
let fields = un.fields(ctx.db());
|
let fields = un.fields(ctx.db());
|
||||||
let (fields, fields_omitted) = visible_fields(ctx.completion, &fields, un)?;
|
let (fields, fields_omitted) = visible_fields(ctx.completion, &fields, un)?;
|
||||||
|
|
||||||
|
|
|
@ -94,3 +94,12 @@ pub(crate) fn format_literal_label(name: &str, kind: StructKind) -> SmolStr {
|
||||||
StructKind::Unit => name.into(),
|
StructKind::Unit => name.into(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Format a struct, etc. literal option for lookup used in completions filtering.
|
||||||
|
pub(crate) fn format_literal_lookup(name: &str, kind: StructKind) -> SmolStr {
|
||||||
|
match kind {
|
||||||
|
StructKind::Tuple => SmolStr::from_iter([name, "()"]),
|
||||||
|
StructKind::Record => SmolStr::from_iter([name, "{}"]),
|
||||||
|
StructKind::Unit => name.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -159,7 +159,7 @@ pub mod some_module {
|
||||||
pub struct ThiiiiiirdStruct;
|
pub struct ThiiiiiirdStruct;
|
||||||
// contains all letters from the query, but not in the beginning, displayed second
|
// contains all letters from the query, but not in the beginning, displayed second
|
||||||
pub struct AfterThirdStruct;
|
pub struct AfterThirdStruct;
|
||||||
// contains all letters from the query in the begginning, displayed first
|
// contains all letters from the query in the beginning, displayed first
|
||||||
pub struct ThirdStruct;
|
pub struct ThirdStruct;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -467,7 +467,7 @@ fn foo() {
|
||||||
fn completes_enum_variant_pat() {
|
fn completes_enum_variant_pat() {
|
||||||
cov_mark::check!(enum_variant_pattern_path);
|
cov_mark::check!(enum_variant_pattern_path);
|
||||||
check_edit(
|
check_edit(
|
||||||
"RecordVariant {…}",
|
"RecordVariant{}",
|
||||||
r#"
|
r#"
|
||||||
enum Enum {
|
enum Enum {
|
||||||
RecordVariant { field: u32 }
|
RecordVariant { field: u32 }
|
||||||
|
|
|
@@ -20,7 +20,7 @@ impl RootDatabase {
pub fn apply_change(&mut self, change: Change) {
let _p = profile::span("RootDatabase::apply_change");
self.request_cancellation();
tracing::info!("apply_change {:?}", change);
tracing::trace!("apply_change {:?}", change);
if let Some(roots) = &change.roots {
let mut local_roots = FxHashSet::default();
let mut library_roots = FxHashSet::default();

@@ -82,7 +82,7 @@ impl Definition {
}

/// Textual range of the identifier which will change when renaming this
/// `Definition`. Note that some definitions, like buitin types, can't be
/// `Definition`. Note that some definitions, like builtin types, can't be
/// renamed.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
let res = match self {

@@ -402,7 +402,9 @@ impl<'a> FindUsages<'a> {
.or_else(|| ty.as_builtin().map(|builtin| builtin.name()))
})
};
self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.to_smol_str())
// We need to unescape the name in case it is written without "r#" in earlier
// editions of Rust where it isn't a keyword.
self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.unescaped().to_smol_str())
}
};
let name = match &name {

@@ -750,7 +750,7 @@ fn main() {
enum Foo { A }
fn main() {
// FIXME: this should not bail out but current behavior is such as the old algorithm.
// ExprValidator::validate_match(..) checks types of top level patterns incorrecly.
// ExprValidator::validate_match(..) checks types of top level patterns incorrectly.
match Foo::A {
ref _x => {}
Foo::A => {}

@ -1664,6 +1664,40 @@ fn f() {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_await_into_future_poll() {
|
||||||
|
check(
|
||||||
|
r#"
|
||||||
|
//- minicore: future
|
||||||
|
|
||||||
|
struct Futurable;
|
||||||
|
|
||||||
|
impl core::future::IntoFuture for Futurable {
|
||||||
|
type IntoFuture = MyFut;
|
||||||
|
}
|
||||||
|
|
||||||
|
struct MyFut;
|
||||||
|
|
||||||
|
impl core::future::Future for MyFut {
|
||||||
|
type Output = ();
|
||||||
|
|
||||||
|
fn poll(
|
||||||
|
//^^^^
|
||||||
|
self: std::pin::Pin<&mut Self>,
|
||||||
|
cx: &mut std::task::Context<'_>
|
||||||
|
) -> std::task::Poll<Self::Output>
|
||||||
|
{
|
||||||
|
()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn f() {
|
||||||
|
Futurable.await$0;
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn goto_try_op() {
|
fn goto_try_op() {
|
||||||
check(
|
check(
|
||||||
|
|
|
@ -27,6 +27,7 @@ use crate::{
|
||||||
pub struct HoverConfig {
|
pub struct HoverConfig {
|
||||||
pub links_in_hover: bool,
|
pub links_in_hover: bool,
|
||||||
pub documentation: Option<HoverDocFormat>,
|
pub documentation: Option<HoverDocFormat>,
|
||||||
|
pub keywords: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HoverConfig {
|
impl HoverConfig {
|
||||||
|
@ -119,6 +120,8 @@ pub(crate) fn hover(
|
||||||
}
|
}
|
||||||
|
|
||||||
let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())));
|
let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())));
|
||||||
|
// prefer descending the same token kind in attribute expansions, in normal macros text
|
||||||
|
// equivalency is more important
|
||||||
let descended = if in_attr {
|
let descended = if in_attr {
|
||||||
[sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
|
[sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -230,7 +230,7 @@ pub(super) fn keyword(
|
||||||
config: &HoverConfig,
|
config: &HoverConfig,
|
||||||
token: &SyntaxToken,
|
token: &SyntaxToken,
|
||||||
) -> Option<HoverResult> {
|
) -> Option<HoverResult> {
|
||||||
if !token.kind().is_keyword() || !config.documentation.is_some() {
|
if !token.kind().is_keyword() || !config.documentation.is_some() || !config.keywords {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
let parent = token.parent()?;
|
let parent = token.parent()?;
|
||||||
|
|
|
@ -8,7 +8,11 @@ fn check_hover_no_result(ra_fixture: &str) {
|
||||||
let (analysis, position) = fixture::position(ra_fixture);
|
let (analysis, position) = fixture::position(ra_fixture);
|
||||||
let hover = analysis
|
let hover = analysis
|
||||||
.hover(
|
.hover(
|
||||||
&HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
|
&HoverConfig {
|
||||||
|
links_in_hover: true,
|
||||||
|
documentation: Some(HoverDocFormat::Markdown),
|
||||||
|
keywords: true,
|
||||||
|
},
|
||||||
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
|
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
@ -20,7 +24,11 @@ fn check(ra_fixture: &str, expect: Expect) {
|
||||||
let (analysis, position) = fixture::position(ra_fixture);
|
let (analysis, position) = fixture::position(ra_fixture);
|
||||||
let hover = analysis
|
let hover = analysis
|
||||||
.hover(
|
.hover(
|
||||||
&HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
|
&HoverConfig {
|
||||||
|
links_in_hover: true,
|
||||||
|
documentation: Some(HoverDocFormat::Markdown),
|
||||||
|
keywords: true,
|
||||||
|
},
|
||||||
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
|
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
|
||||||
)
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
|
@ -37,7 +45,11 @@ fn check_hover_no_links(ra_fixture: &str, expect: Expect) {
|
||||||
let (analysis, position) = fixture::position(ra_fixture);
|
let (analysis, position) = fixture::position(ra_fixture);
|
||||||
let hover = analysis
|
let hover = analysis
|
||||||
.hover(
|
.hover(
|
||||||
&HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
|
&HoverConfig {
|
||||||
|
links_in_hover: false,
|
||||||
|
documentation: Some(HoverDocFormat::Markdown),
|
||||||
|
keywords: true,
|
||||||
|
},
|
||||||
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
|
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
|
||||||
)
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
|
@ -54,7 +66,11 @@ fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) {
|
||||||
let (analysis, position) = fixture::position(ra_fixture);
|
let (analysis, position) = fixture::position(ra_fixture);
|
||||||
let hover = analysis
|
let hover = analysis
|
||||||
.hover(
|
.hover(
|
||||||
&HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::PlainText) },
|
&HoverConfig {
|
||||||
|
links_in_hover: true,
|
||||||
|
documentation: Some(HoverDocFormat::PlainText),
|
||||||
|
keywords: true,
|
||||||
|
},
|
||||||
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
|
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
|
||||||
)
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
|
@ -71,7 +87,11 @@ fn check_actions(ra_fixture: &str, expect: Expect) {
|
||||||
let (analysis, file_id, position) = fixture::range_or_position(ra_fixture);
|
let (analysis, file_id, position) = fixture::range_or_position(ra_fixture);
|
||||||
let hover = analysis
|
let hover = analysis
|
||||||
.hover(
|
.hover(
|
||||||
&HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
|
&HoverConfig {
|
||||||
|
links_in_hover: true,
|
||||||
|
documentation: Some(HoverDocFormat::Markdown),
|
||||||
|
keywords: true,
|
||||||
|
},
|
||||||
FileRange { file_id, range: position.range_or_empty() },
|
FileRange { file_id, range: position.range_or_empty() },
|
||||||
)
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
|
@ -83,7 +103,11 @@ fn check_hover_range(ra_fixture: &str, expect: Expect) {
|
||||||
let (analysis, range) = fixture::range(ra_fixture);
|
let (analysis, range) = fixture::range(ra_fixture);
|
||||||
let hover = analysis
|
let hover = analysis
|
||||||
.hover(
|
.hover(
|
||||||
&HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
|
&HoverConfig {
|
||||||
|
links_in_hover: false,
|
||||||
|
documentation: Some(HoverDocFormat::Markdown),
|
||||||
|
keywords: true,
|
||||||
|
},
|
||||||
range,
|
range,
|
||||||
)
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
|
@ -95,7 +119,11 @@ fn check_hover_range_no_results(ra_fixture: &str) {
|
||||||
let (analysis, range) = fixture::range(ra_fixture);
|
let (analysis, range) = fixture::range(ra_fixture);
|
||||||
let hover = analysis
|
let hover = analysis
|
||||||
.hover(
|
.hover(
|
||||||
&HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
|
&HoverConfig {
|
||||||
|
links_in_hover: false,
|
||||||
|
documentation: Some(HoverDocFormat::Markdown),
|
||||||
|
keywords: true,
|
||||||
|
},
|
||||||
range,
|
range,
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
|
@ -130,8 +130,11 @@ impl StaticIndex<'_> {
|
||||||
syntax::NodeOrToken::Node(_) => None,
|
syntax::NodeOrToken::Node(_) => None,
|
||||||
syntax::NodeOrToken::Token(x) => Some(x),
|
syntax::NodeOrToken::Token(x) => Some(x),
|
||||||
});
|
});
|
||||||
let hover_config =
|
let hover_config = HoverConfig {
|
||||||
HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
|
links_in_hover: true,
|
||||||
|
documentation: Some(HoverDocFormat::Markdown),
|
||||||
|
keywords: true,
|
||||||
|
};
|
||||||
let tokens = tokens.filter(|token| {
|
let tokens = tokens.filter(|token| {
|
||||||
matches!(
|
matches!(
|
||||||
token.kind(),
|
token.kind(),
|
||||||
|
|
|
@@ -1,4 +1,4 @@
use hir::{Function, Semantics};
use hir::{DefWithBody, Semantics};
use ide_db::base_db::FilePosition;
use ide_db::RootDatabase;
use syntax::{algo::find_node_at_offset, ast, AstNode};

@@ -19,8 +19,12 @@ fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);

let function = find_node_at_offset::<ast::Fn>(source_file.syntax(), position.offset)?;
let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
let def: DefWithBody = match item {
let function: Function = sema.to_def(&function)?;
ast::Item::Fn(it) => sema.to_def(&it)?.into(),
Some(function.debug_hir(db))
ast::Item::Const(it) => sema.to_def(&it)?.into(),
ast::Item::Static(it) => sema.to_def(&it)?.into(),
_ => return None,
};
Some(def.debug_hir(db))
}

@@ -321,7 +321,7 @@ struct MatchState<'t> {
/// The KleeneOp of this sequence if we are in a repetition.
sep_kind: Option<RepeatKind>,

/// Number of tokens of seperator parsed
/// Number of tokens of separator parsed
sep_parsed: Option<usize>,

/// Matched meta variables bindings

@@ -3,7 +3,7 @@
//!
//! Pure model is represented by the [`base_db::CrateGraph`] from another crate.
//!
//! In this crate, we are conserned with "real world" project models.
//! In this crate, we are concerned with "real world" project models.
//!
//! Specifically, here we have a representation for a Cargo project
//! ([`CargoWorkspace`]) and for manually specified layout ([`ProjectJson`]).

@@ -770,7 +770,7 @@ fn handle_rustc_crates(
queue.push_back(root_pkg);
while let Some(pkg) = queue.pop_front() {
// Don't duplicate packages if they are dependended on a diamond pattern
// N.B. if this line is ommitted, we try to analyse over 4_800_000 crates
// N.B. if this line is omitted, we try to analyse over 4_800_000 crates
// which is not ideal
if rustc_pkg_crates.contains_key(&pkg) {
continue;

@@ -52,7 +52,7 @@ impl Logger {
// merge chalk filter to our main filter (from RA_LOG env).
//
// The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`.
// As the value should only affect chalk crates, we'd better mannually
// As the value should only affect chalk crates, we'd better manually
// specify the target. And for simplicity, CHALK_DEBUG only accept the value
// that specify level.
let chalk_level_dir = std::env::var("CHALK_DEBUG")

@ -45,7 +45,8 @@ mod patch_old_style;
|
||||||
// - foo_command = overrides the subcommand, foo_overrideCommand allows full overwriting, extra args only applies for foo_command
|
// - foo_command = overrides the subcommand, foo_overrideCommand allows full overwriting, extra args only applies for foo_command
|
||||||
|
|
||||||
// Defines the server-side configuration of the rust-analyzer. We generate
|
// Defines the server-side configuration of the rust-analyzer. We generate
|
||||||
// *parts* of VS Code's `package.json` config from this.
|
// *parts* of VS Code's `package.json` config from this. Run `cargo test` to
|
||||||
|
// re-generate that file.
|
||||||
//
|
//
|
||||||
// However, editor specific config, which the server doesn't know about, should
|
// However, editor specific config, which the server doesn't know about, should
|
||||||
// be specified directly in `package.json`.
|
// be specified directly in `package.json`.
|
||||||
|
@ -120,6 +121,10 @@ config_data! {
|
||||||
/// Cargo, you might also want to change
|
/// Cargo, you might also want to change
|
||||||
/// `#rust-analyzer.cargo.buildScripts.overrideCommand#`.
|
/// `#rust-analyzer.cargo.buildScripts.overrideCommand#`.
|
||||||
///
|
///
|
||||||
|
/// If there are multiple linked projects, this command is invoked for
|
||||||
|
/// each of them, with the working directory being the project root
|
||||||
|
/// (i.e., the folder containing the `Cargo.toml`).
|
||||||
|
///
|
||||||
/// An example command would be:
|
/// An example command would be:
|
||||||
///
|
///
|
||||||
/// ```bash
|
/// ```bash
|
||||||
|
@ -243,7 +248,10 @@ config_data! {
|
||||||
hover_actions_run_enable: bool = "true",
|
hover_actions_run_enable: bool = "true",
|
||||||
|
|
||||||
/// Whether to show documentation on hover.
|
/// Whether to show documentation on hover.
|
||||||
hover_documentation_enable: bool = "true",
|
hover_documentation_enable: bool = "true",
|
||||||
|
/// Whether to show keyword hover popups. Only applies when
|
||||||
|
/// `#rust-analyzer.hover.documentation.enable#` is set.
|
||||||
|
hover_documentation_keywords_enable: bool = "true",
|
||||||
/// Use markdown syntax for links in hover.
|
/// Use markdown syntax for links in hover.
|
||||||
hover_links_enable: bool = "true",
|
hover_links_enable: bool = "true",
|
||||||
|
|
||||||
|
@ -1187,6 +1195,7 @@ impl Config {
|
||||||
HoverDocFormat::PlainText
|
HoverDocFormat::PlainText
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
|
keywords: self.data.hover_documentation_keywords_enable,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -8,7 +8,7 @@ use std::{sync::Arc, time::Instant};
|
||||||
use crossbeam_channel::{unbounded, Receiver, Sender};
|
use crossbeam_channel::{unbounded, Receiver, Sender};
|
||||||
use flycheck::FlycheckHandle;
|
use flycheck::FlycheckHandle;
|
||||||
use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
|
use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
|
||||||
use ide_db::base_db::{CrateId, FileLoader, SourceDatabase, SourceDatabaseExt};
|
use ide_db::base_db::{CrateId, FileLoader, SourceDatabase};
|
||||||
use lsp_types::{SemanticTokens, Url};
|
use lsp_types::{SemanticTokens, Url};
|
||||||
use parking_lot::{Mutex, RwLock};
|
use parking_lot::{Mutex, RwLock};
|
||||||
use proc_macro_api::ProcMacroServer;
|
use proc_macro_api::ProcMacroServer;
|
||||||
|
@ -176,9 +176,9 @@ impl GlobalState {
|
||||||
|
|
||||||
pub(crate) fn process_changes(&mut self) -> bool {
|
pub(crate) fn process_changes(&mut self) -> bool {
|
||||||
let _p = profile::span("GlobalState::process_changes");
|
let _p = profile::span("GlobalState::process_changes");
|
||||||
let mut fs_refresh_changes = Vec::new();
|
|
||||||
// A file was added or deleted
|
// A file was added or deleted
|
||||||
let mut has_structure_changes = false;
|
let mut has_structure_changes = false;
|
||||||
|
let mut workspace_structure_change = None;
|
||||||
|
|
||||||
let (change, changed_files) = {
|
let (change, changed_files) = {
|
||||||
let mut change = Change::new();
|
let mut change = Change::new();
|
||||||
|
@ -192,7 +192,7 @@ impl GlobalState {
|
||||||
if let Some(path) = vfs.file_path(file.file_id).as_path() {
|
if let Some(path) = vfs.file_path(file.file_id).as_path() {
|
||||||
let path = path.to_path_buf();
|
let path = path.to_path_buf();
|
||||||
if reload::should_refresh_for_change(&path, file.change_kind) {
|
if reload::should_refresh_for_change(&path, file.change_kind) {
|
||||||
fs_refresh_changes.push((path, file.file_id));
|
workspace_structure_change = Some(path);
|
||||||
}
|
}
|
||||||
if file.is_created_or_deleted() {
|
if file.is_created_or_deleted() {
|
||||||
has_structure_changes = true;
|
has_structure_changes = true;
|
||||||
|
@ -227,11 +227,10 @@ impl GlobalState {
|
||||||
|
|
||||||
{
|
{
|
||||||
let raw_database = self.analysis_host.raw_database();
|
let raw_database = self.analysis_host.raw_database();
|
||||||
let workspace_structure_change =
|
// FIXME: ideally we should only trigger a workspace fetch for non-library changes
|
||||||
fs_refresh_changes.into_iter().find(|&(_, file_id)| {
|
// but somethings going wrong with the source root business when we add a new local
|
||||||
!raw_database.source_root(raw_database.file_source_root(file_id)).is_library
|
// crate see https://github.com/rust-lang/rust-analyzer/issues/13029
|
||||||
});
|
if let Some(path) = workspace_structure_change {
|
||||||
if let Some((path, _)) = workspace_structure_change {
|
|
||||||
self.fetch_workspaces_queue
|
self.fetch_workspaces_queue
|
||||||
.request_op(format!("workspace vfs file change: {}", path.display()));
|
.request_op(format!("workspace vfs file change: {}", path.display()));
|
||||||
}
|
}
|
||||||
|
|
|
@ -51,6 +51,12 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
|
||||||
|
let _p = profile::span("handle_stop_flycheck");
|
||||||
|
state.flycheck.iter().for_each(|flycheck| flycheck.cancel());
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn handle_analyzer_status(
|
pub(crate) fn handle_analyzer_status(
|
||||||
snap: GlobalStateSnapshot,
|
snap: GlobalStateSnapshot,
|
||||||
params: lsp_ext::AnalyzerStatusParams,
|
params: lsp_ext::AnalyzerStatusParams,
|
||||||
|
@ -703,10 +709,8 @@ pub(crate) fn handle_runnables(
|
||||||
|
|
||||||
let mut res = Vec::new();
|
let mut res = Vec::new();
|
||||||
for runnable in snap.analysis.runnables(file_id)? {
|
for runnable in snap.analysis.runnables(file_id)? {
|
||||||
if let Some(offset) = offset {
|
if should_skip_for_offset(&runnable, offset) {
|
||||||
if !runnable.nav.full_range.contains_inclusive(offset) {
|
continue;
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
if should_skip_target(&runnable, cargo_spec.as_ref()) {
|
if should_skip_target(&runnable, cargo_spec.as_ref()) {
|
||||||
continue;
|
continue;
|
||||||
|
@ -772,6 +776,14 @@ pub(crate) fn handle_runnables(
|
||||||
Ok(res)
|
Ok(res)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn should_skip_for_offset(runnable: &Runnable, offset: Option<TextSize>) -> bool {
|
||||||
|
match offset {
|
||||||
|
None => false,
|
||||||
|
_ if matches!(&runnable.kind, RunnableKind::TestMod { .. }) => false,
|
||||||
|
Some(offset) => !runnable.nav.full_range.contains_inclusive(offset),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn handle_related_tests(
|
pub(crate) fn handle_related_tests(
|
||||||
snap: GlobalStateSnapshot,
|
snap: GlobalStateSnapshot,
|
||||||
params: lsp_types::TextDocumentPositionParams,
|
params: lsp_types::TextDocumentPositionParams,
|
||||||
|
@ -1765,7 +1777,7 @@ fn run_rustfmt(
|
||||||
|
|
||||||
let line_index = snap.file_line_index(file_id)?;
|
let line_index = snap.file_line_index(file_id)?;
|
||||||
|
|
||||||
let mut rustfmt = match snap.config.rustfmt() {
|
let mut command = match snap.config.rustfmt() {
|
||||||
RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => {
|
RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => {
|
||||||
let mut cmd = process::Command::new(toolchain::rustfmt());
|
let mut cmd = process::Command::new(toolchain::rustfmt());
|
||||||
cmd.args(extra_args);
|
cmd.args(extra_args);
|
||||||
|
@ -1830,12 +1842,12 @@ fn run_rustfmt(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut rustfmt = rustfmt
|
let mut rustfmt = command
|
||||||
.stdin(Stdio::piped())
|
.stdin(Stdio::piped())
|
||||||
.stdout(Stdio::piped())
|
.stdout(Stdio::piped())
|
||||||
.stderr(Stdio::piped())
|
.stderr(Stdio::piped())
|
||||||
.spawn()
|
.spawn()
|
||||||
.context(format!("Failed to spawn {:?}", rustfmt))?;
|
.context(format!("Failed to spawn {:?}", command))?;
|
||||||
|
|
||||||
rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
|
rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
|
||||||
|
|
||||||
|
@ -1854,7 +1866,11 @@ fn run_rustfmt(
|
||||||
// formatting because otherwise an error is surfaced to the user on top of the
|
// formatting because otherwise an error is surfaced to the user on top of the
|
||||||
// syntax error diagnostics they're already receiving. This is especially jarring
|
// syntax error diagnostics they're already receiving. This is especially jarring
|
||||||
// if they have format on save enabled.
|
// if they have format on save enabled.
|
||||||
tracing::info!("rustfmt exited with status 1, assuming parse error and ignoring");
|
tracing::warn!(
|
||||||
|
?command,
|
||||||
|
%captured_stderr,
|
||||||
|
"rustfmt exited with status 1"
|
||||||
|
);
|
||||||
Ok(None)
|
Ok(None)
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
|
|
|
@@ -129,6 +129,14 @@ pub struct ExpandedMacro {
     pub expansion: String,
 }
 
+pub enum CancelFlycheck {}
+
+impl Request for CancelFlycheck {
+    type Params = ();
+    type Result = ();
+    const METHOD: &'static str = "rust-analyzer/cancelFlycheck";
+}
+
 pub enum MatchingBrace {}
 
 impl Request for MatchingBrace {
@@ -288,7 +288,7 @@ impl GlobalState {
 
         if became_quiescent {
            // Project has loaded properly, kick off initial flycheck
-            self.flycheck.iter().for_each(FlycheckHandle::update);
+            self.flycheck.iter().for_each(FlycheckHandle::restart);
             if self.config.prefill_caches() {
                 self.prime_caches_queue.request_op("became quiescent".to_string());
             }
@@ -590,6 +590,7 @@ impl GlobalState {
             .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
             .on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
             .on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
+            .on_sync_mut::<lsp_ext::CancelFlycheck>(handlers::handle_cancel_flycheck)
             .on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
             .on_sync::<lsp_ext::OnEnter>(handlers::handle_on_enter)
             .on_sync::<lsp_types::request::SelectionRangeRequest>(handlers::handle_selection_range)
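The `handle_cancel_flycheck` handler registered here is not part of this excerpt. A minimal, self-contained sketch of the rough shape such a handler plausibly has, with stand-in `FlycheckHandle` and `GlobalState` types and an assumed `cancel` operation on the handle; treat it as an illustration, not the actual implementation:

```rust
// Stand-ins for the real rust-analyzer types.
struct FlycheckHandle;

impl FlycheckHandle {
    fn cancel(&self) {
        // The real handle would message its flycheck thread to stop the run.
        println!("flycheck cancelled");
    }
}

struct GlobalState {
    flycheck: Vec<FlycheckHandle>,
}

// Rough shape of a handler for the `rust-analyzer/cancelFlycheck` request:
// cancel every running flycheck and return an empty result.
fn handle_cancel_flycheck(state: &mut GlobalState, _params: ()) -> Result<(), String> {
    state.flycheck.iter().for_each(FlycheckHandle::cancel);
    Ok(())
}

fn main() {
    let mut state = GlobalState { flycheck: vec![FlycheckHandle, FlycheckHandle] };
    handle_cancel_flycheck(&mut state, ()).unwrap();
}
```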
@@ -779,7 +780,7 @@ impl GlobalState {
                         for (id, _) in workspace_ids.clone() {
                             if id == flycheck.id() {
                                 updated = true;
-                                flycheck.update();
+                                flycheck.restart();
                                 continue;
                             }
                         }
@@ -798,7 +799,7 @@ impl GlobalState {
                     // No specific flycheck was triggered, so let's trigger all of them.
                     if !updated {
                         for flycheck in &this.flycheck {
-                            flycheck.update();
+                            flycheck.restart();
                         }
                     }
                     Ok(())
@@ -1,4 +1,4 @@
-//! Things which exist to solve practial issues, but which shouldn't exist.
+//! Things which exist to solve practical issues, but which shouldn't exist.
 //!
 //! Please avoid adding new usages of the functions in this module
 
@@ -471,6 +471,21 @@ pub mod future {
         #[lang = "poll"]
         fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output>;
     }
+
+    pub trait IntoFuture {
+        type Output;
+        type IntoFuture: Future<Output = Self::Output>;
+        #[lang = "into_future"]
+        fn into_future(self) -> Self::IntoFuture;
+    }
+
+    impl<F: Future> IntoFuture for F {
+        type Output = F::Output;
+        type IntoFuture = F;
+        fn into_future(self) -> F {
+            self
+        }
+    }
 }
 pub mod task {
     pub enum Poll<T> {
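This teaches the `minicore` test fixture about `IntoFuture`, which lets `.await` accept any type convertible into a future, not just types implementing `Future` directly. A small standalone illustration against the real standard library (assuming a toolchain where `std::future::IntoFuture` is available), not against the fixture above:

```rust
use std::future::{ready, IntoFuture, Ready};

// A value that is not a Future itself but can be converted into one.
struct Deferred(i32);

impl IntoFuture for Deferred {
    type Output = i32;
    type IntoFuture = Ready<i32>;

    fn into_future(self) -> Self::IntoFuture {
        ready(self.0)
    }
}

async fn demo() -> i32 {
    // `.await` desugars through `IntoFuture::into_future`, so this compiles
    // even though `Deferred` does not implement `Future` directly.
    Deferred(41).await + 1
}

fn main() {
    // A small executor would be needed to actually drive `demo()`; omitted here.
    let _ = demo();
}
```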
@@ -14,7 +14,7 @@ tracing = "0.1.35"
 jod-thread = "0.1.2"
 walkdir = "2.3.2"
 crossbeam-channel = "0.5.5"
-notify = "=5.0.0-pre.15"
+notify = "=5.0.0-pre.16"
 
 vfs = { path = "../vfs", version = "0.0.0" }
 paths = { path = "../paths", version = "0.0.0" }
@@ -12,7 +12,7 @@
 use std::fs;
 
 use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
-use notify::{RecommendedWatcher, RecursiveMode, Watcher};
+use notify::{Config, RecommendedWatcher, RecursiveMode, Watcher};
 use paths::{AbsPath, AbsPathBuf};
 use vfs::loader;
 use walkdir::WalkDir;
@@ -91,9 +91,12 @@ impl NotifyActor {
                         self.watcher = None;
                         if !config.watch.is_empty() {
                             let (watcher_sender, watcher_receiver) = unbounded();
-                            let watcher = log_notify_error(RecommendedWatcher::new(move |event| {
-                                watcher_sender.send(event).unwrap();
-                            }));
+                            let watcher = log_notify_error(RecommendedWatcher::new(
+                                move |event| {
+                                    watcher_sender.send(event).unwrap();
+                                },
+                                Config::default(),
+                            ));
                             self.watcher = watcher.map(|it| (it, watcher_receiver));
                         }
 
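The notify `5.0.0-pre.16` pre-release changes `RecommendedWatcher::new` to take a `Config` alongside the event handler, which is what the hunk above adapts to. A minimal standalone sketch of that constructor shape, assuming the same pre-release; the channel wiring and the `watch` call here are illustrative rather than taken from rust-analyzer:

```rust
use notify::{Config, RecommendedWatcher, RecursiveMode, Watcher};
use std::path::Path;

fn watch_dir(path: &Path) -> notify::Result<RecommendedWatcher> {
    let (tx, rx) = std::sync::mpsc::channel();

    // The second `Config` argument is the new part of the API.
    let mut watcher = RecommendedWatcher::new(
        move |event| {
            // Forward raw notify events to whoever holds the receiver.
            let _ = tx.send(event);
        },
        Config::default(),
    )?;
    watcher.watch(path, RecursiveMode::Recursive)?;

    // A real caller would keep `rx` and drain events; dropped here for brevity.
    drop(rx);
    Ok(watcher)
}
```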
@@ -64,7 +64,7 @@ pub struct FileId(pub u32);
 
 /// Storage for all files read by rust-analyzer.
 ///
-/// For more informations see the [crate-level](crate) documentation.
+/// For more information see the [crate-level](crate) documentation.
 #[derive(Default)]
 pub struct Vfs {
     interner: PathInterner,
@@ -485,7 +485,7 @@ Mind the code--architecture gap: at the moment, we are using fewer feature flags
 ### Serialization
 
 In Rust, it is easy (often too easy) to add serialization to any type by adding `#[derive(Serialize)]`.
-This easiness is misleading -- serializable types impose significant backwards compatability constraints.
+This easiness is misleading -- serializable types impose significant backwards compatibility constraints.
 If a type is serializable, then it is a part of some IPC boundary.
 You often don't control the other side of this boundary, so changing serializable types is hard.
 
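As a concrete illustration of the constraint described in that paragraph (the example, field names, and the use of `serde`/`serde_json` are ours, not from the rust-analyzer docs): once a type's serialized form is consumed by another process, even an innocent field rename becomes a breaking change unless the old wire name is preserved with an attribute.

```rust
use serde::{Deserialize, Serialize};

// v1 of this type crossed an IPC boundary as `{"file_id": 1}`.
// Renaming the Rust field would silently break old peers, so the
// original wire name has to be kept forever via `#[serde(rename)]`.
#[derive(Serialize, Deserialize)]
struct OpenFile {
    #[serde(rename = "file_id")]
    document_id: u32,
}

fn main() {
    let msg = OpenFile { document_id: 1 };
    // Still prints {"file_id":1}, preserving the old wire format.
    println!("{}", serde_json::to_string(&msg).unwrap());
}
```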
@@ -1,5 +1,5 @@
 <!---
-lsp_ext.rs hash: 2a188defec26cc7c
+lsp_ext.rs hash: 7b710095d773b978
 
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
@@ -118,6 +118,10 @@ If you're changing this because you're using some tool wrapping
 Cargo, you might also want to change
 `#rust-analyzer.cargo.buildScripts.overrideCommand#`.
 
+If there are multiple linked projects, this command is invoked for
+each of them, with the working directory being the project root
+(i.e., the folder containing the `Cargo.toml`).
+
 An example command would be:
 
 ```bash
@@ -318,6 +322,12 @@ Whether to show `Run` action. Only applies when
 --
 Whether to show documentation on hover.
 --
+[[rust-analyzer.hover.documentation.keywords.enable]]rust-analyzer.hover.documentation.keywords.enable (default: `true`)::
++
+--
+Whether to show keyword hover popups. Only applies when
+`#rust-analyzer.hover.documentation.enable#` is set.
+--
 [[rust-analyzer.hover.links.enable]]rust-analyzer.hover.links.enable (default: `true`)::
 +
 --
@@ -235,6 +235,11 @@
                 "command": "rust-analyzer.moveItemDown",
                 "title": "Move item down",
                 "category": "rust-analyzer"
+            },
+            {
+                "command": "rust-analyzer.cancelFlycheck",
+                "title": "Cancel running flychecks",
+                "category": "rust-analyzer"
             }
         ],
         "keybindings": [
@@ -542,7 +547,7 @@
                     ]
                 },
                 "rust-analyzer.checkOnSave.overrideCommand": {
-                    "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefor include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.",
+                    "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefor include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.",
                     "default": null,
                     "type": [
                         "null",
@@ -756,6 +761,11 @@
                     "default": true,
                     "type": "boolean"
                 },
+                "rust-analyzer.hover.documentation.keywords.enable": {
+                    "markdownDescription": "Whether to show keyword hover popups. Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.",
+                    "default": true,
+                    "type": "boolean"
+                },
                 "rust-analyzer.hover.links.enable": {
                     "markdownDescription": "Use markdown syntax for links in hover.",
                     "default": true,
@@ -433,7 +433,7 @@ export function syntaxTree(ctx: Ctx): Cmd {
 // The contents of the file come from the `TextDocumentContentProvider`
 export function viewHir(ctx: Ctx): Cmd {
     const tdcp = new (class implements vscode.TextDocumentContentProvider {
-        readonly uri = vscode.Uri.parse("rust-analyzer-hir://viewHir/hir.txt");
+        readonly uri = vscode.Uri.parse("rust-analyzer-hir://viewHir/hir.rs");
         readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
         constructor() {
             vscode.workspace.onDidChangeTextDocument(
|
@ -655,7 +655,7 @@ function crateGraph(ctx: Ctx, full: boolean): Cmd {
|
||||||
html, body { margin:0; padding:0; overflow:hidden }
|
html, body { margin:0; padding:0; overflow:hidden }
|
||||||
svg { position:fixed; top:0; left:0; height:100%; width:100% }
|
svg { position:fixed; top:0; left:0; height:100%; width:100% }
|
||||||
|
|
||||||
/* Disable the graphviz backgroud and fill the polygons */
|
/* Disable the graphviz background and fill the polygons */
|
||||||
.graph > polygon { display:none; }
|
.graph > polygon { display:none; }
|
||||||
:is(.node,.edge) polygon { fill: white; }
|
:is(.node,.edge) polygon { fill: white; }
|
||||||
|
|
||||||
|
@@ -817,6 +817,12 @@ export function openDocs(ctx: Ctx): Cmd {
     };
 }
 
+export function cancelFlycheck(ctx: Ctx): Cmd {
+    return async () => {
+        await ctx.client.sendRequest(ra.cancelFlycheck);
+    };
+}
+
 export function resolveCodeAction(ctx: Ctx): Cmd {
     const client = ctx.client;
     return async (params: lc.CodeAction) => {
@@ -5,8 +5,6 @@ import { log } from "./util";
 
 export type UpdatesChannel = "stable" | "nightly";
 
-const NIGHTLY_TAG = "nightly";
-
 export type RunnableEnvCfg =
     | undefined
     | Record<string, string>
@@ -175,10 +173,6 @@ export class Config {
             gotoTypeDef: this.get<boolean>("hover.actions.gotoTypeDef.enable"),
         };
     }
-
-    get currentExtensionIsNightly() {
-        return this.package.releaseTag === NIGHTLY_TAG;
-    }
 }
 
 export async function updateConfig(config: vscode.WorkspaceConfiguration) {
@@ -75,6 +75,23 @@ export const expandMacro = new lc.RequestType<ExpandMacroParams, ExpandedMacro |
     "rust-analyzer/expandMacro"
 );
 
+export const relatedTests = new lc.RequestType<lc.TextDocumentPositionParams, TestInfo[], void>(
+    "rust-analyzer/relatedTests"
+);
+
+export const cancelFlycheck = new lc.RequestType0<void, void>("rust-analyzer/cancelFlycheck");
+
+// Experimental extensions
+
+export interface SsrParams {
+    query: string;
+    parseOnly: boolean;
+    textDocument: lc.TextDocumentIdentifier;
+    position: lc.Position;
+    selections: readonly lc.Range[];
+}
+export const ssr = new lc.RequestType<SsrParams, lc.WorkspaceEdit, void>("experimental/ssr");
+
 export interface MatchingBraceParams {
     textDocument: lc.TextDocumentIdentifier;
     positions: lc.Position[];
@@ -127,19 +144,6 @@ export interface TestInfo {
     runnable: Runnable;
 }
 
-export const relatedTests = new lc.RequestType<lc.TextDocumentPositionParams, TestInfo[], void>(
-    "rust-analyzer/relatedTests"
-);
-
-export interface SsrParams {
-    query: string;
-    parseOnly: boolean;
-    textDocument: lc.TextDocumentIdentifier;
-    position: lc.Position;
-    selections: readonly lc.Range[];
-}
-export const ssr = new lc.RequestType<SsrParams, lc.WorkspaceEdit, void>("experimental/ssr");
-
 export interface CommandLink extends lc.Command {
     /**
      * A tooltip for the command, when represented in the UI.
@@ -163,6 +163,7 @@ async function initCommonContext(context: vscode.ExtensionContext, ctx: Ctx) {
     ctx.registerCommand("peekTests", commands.peekTests);
     ctx.registerCommand("moveItemUp", commands.moveItemUp);
     ctx.registerCommand("moveItemDown", commands.moveItemDown);
+    ctx.registerCommand("cancelFlycheck", commands.cancelFlycheck);
 
     defaultOnEnter.dispose();
     ctx.registerCommand("onEnter", commands.onEnter);
@@ -158,7 +158,7 @@ export const getPathForExecutable = memoizeAsync(
 
         try {
             // hmm, `os.homedir()` seems to be infallible
-            // it is not mentioned in docs and cannot be infered by the type signature...
+            // it is not mentioned in docs and cannot be inferred by the type signature...
            const standardPath = vscode.Uri.joinPath(
                 vscode.Uri.file(os.homedir()),
                 ".cargo",