mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-30 15:03:42 +00:00)

commit c622413bc7
Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes

37 changed files with 907 additions and 777 deletions
Cargo.lock (generated), 67 changed lines

@@ -8,6 +8,11 @@ dependencies = [
  "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "anyhow"
+version = "1.0.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "arrayvec"
 version = "0.5.1"
@@ -122,7 +127,7 @@ source = "git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b95
- "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -187,7 +192,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
- "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -310,7 +315,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
- "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -348,7 +353,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "filetime"
-version = "0.2.7"
+version = "0.2.8"
@@ -465,7 +470,7 @@ name = "heck"
- "unicode-segmentation 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -748,7 +753,7 @@ version = "4.0.14"
- "filetime 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "filetime 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -766,12 +771,12 @@ version = "0.1.41"
- "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
 name = "num-traits"
-version = "0.2.8"
+version = "0.2.9"
@@ -779,7 +784,7 @@ dependencies = [
 name = "num_cpus"
-version = "1.11.0"
+version = "1.11.1"
@@ -842,7 +847,7 @@ dependencies = [
- "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -876,7 +881,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
- "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -896,7 +901,7 @@ dependencies = [
- "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1091,7 +1096,7 @@ dependencies = [
- "unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1383,7 +1388,7 @@ dependencies = [
- "num_cpus 1.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1506,7 +1511,7 @@ dependencies = [
- "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1551,7 +1556,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
- "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1571,7 +1576,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
- "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1625,7 +1630,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "syn"
-version = "1.0.7"
+version = "1.0.8"
@@ -1681,7 +1686,7 @@ name = "threadpool"
- "num_cpus 1.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1696,10 +1701,10 @@ dependencies = [
 name = "unicase"
-version = "2.5.1"
+version = "2.6.0"
- "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "version_check 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1720,7 +1725,7 @@ dependencies = [
 name = "unicode-segmentation"
-version = "1.5.0"
+version = "1.6.0"
@@ -1755,7 +1760,7 @@ dependencies = [
 name = "version_check"
-version = "0.1.5"
+version = "0.9.1"
@@ -1823,6 +1828,7 @@ dependencies = [
 name = "xtask"
 version = "0.1.0"
 dependencies = [
+ "anyhow 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
  "pico-args 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1846,6 +1852,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [metadata]
 "checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
+"checksum anyhow 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)" = "57114fc2a6cc374bce195d3482057c846e706d252ff3604363449695684d7a0d"
 "checksum arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8"
@@ -1886,7 +1893,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
-"checksum filetime 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6bd7380b54ced79dda72ecc35cc4fbbd1da6bba54afaa37e96fd1c2a308cd469"
+"checksum filetime 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1ff6d4dab0aa0c8e6346d46052e93b13a16cf847b54ed357087c35011048cc7d"
@@ -1937,8 +1944,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
-"checksum num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "6ba9a427cfca2be13aa6f6403b0b7e7368fe982bfa16fccc450ce74c46cd9b32"
+"checksum num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "443c53b3c3531dfcbfa499d8893944db78474ad7a1d87fa2d94d1a2231693ac6"
-"checksum num_cpus 1.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "155394f924cdddf08149da25bfb932d226b4a593ca7468b08191ff6335941af5"
+"checksum num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "76dac5ed2a876980778b8b85f75a71b6cbf0db0b1232ee12f826bccb00d09d72"
@@ -2003,22 +2010,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
-"checksum syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7bedb3320d0f3035594b0b723c8a28d7d336a3eda3881db79e61d676fb644c"
+"checksum syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "661641ea2aa15845cddeb97dad000d22070bb5c1fb456b96c1cba883ec691e92"
-"checksum unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150"
+"checksum unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
-"checksum unicode-segmentation 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49f5526225fd8b77342d5986ab5f6055552e9c0776193b5b63fd53b46debfad7"
+"checksum unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e83e153d1053cbb5a118eeff7fd5be06ed99153f00dbcd8ae310c5fb2b22edc0"
-"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
+"checksum version_check 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce"
@@ -1,4 +1,4 @@
-use std::ops::RangeInclusive;
+use std::{iter::once, ops::RangeInclusive};
 
 use hir::db::HirDatabase;
 use ra_syntax::{
@@ -38,28 +38,31 @@ use crate::{
 // ```
 pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let if_expr: ast::IfExpr = ctx.find_node_at_offset()?;
-    let cond = if_expr.condition()?;
-    let mut if_let_ident: Option<String> = None;
-
-    // Check if there is an IfLet that we can handle.
-    match cond.pat() {
-        None => {} // No IfLet, supported.
-        Some(TupleStructPat(ref pat)) if pat.args().count() == 1usize => match &pat.path() {
-            Some(p) => match p.qualifier() {
-                None => if_let_ident = Some(p.syntax().text().to_string()),
-                _ => return None,
-            },
-            _ => return None,
-        },
-        _ => return None, // Unsupported IfLet.
-    };
-
-    let expr = cond.expr()?;
-    let then_block = if_expr.then_branch()?.block()?;
     if if_expr.else_branch().is_some() {
         return None;
     }
 
+    let cond = if_expr.condition()?;
+
+    // Check if there is an IfLet that we can handle.
+    let if_let_pat = match cond.pat() {
+        None => None, // No IfLet, supported.
+        Some(TupleStructPat(pat)) if pat.args().count() == 1 => {
+            let path = pat.path()?;
+            match path.qualifier() {
+                None => {
+                    let bound_ident = pat.args().next().unwrap();
+                    Some((path, bound_ident))
+                }
+                Some(_) => return None,
+            }
+        }
+        Some(_) => return None, // Unsupported IfLet.
+    };
+
+    let cond_expr = cond.expr()?;
+    let then_block = if_expr.then_branch()?.block()?;
+
     let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::Block::cast)?;
 
     if parent_block.expr()? != if_expr.clone().into() {
@@ -79,11 +82,11 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Opt
 
     let parent_container = parent_block.syntax().parent()?.parent()?;
 
-    let early_expression = match parent_container.kind() {
-        WHILE_EXPR | LOOP_EXPR => Some("continue"),
-        FN_DEF => Some("return"),
-        _ => None,
-    }?;
+    let early_expression: ast::Expr = match parent_container.kind() {
+        WHILE_EXPR | LOOP_EXPR => make::expr_continue().into(),
+        FN_DEF => make::expr_return().into(),
+        _ => return None,
+    };
 
     if then_block.syntax().first_child_or_token().map(|t| t.kind() == L_CURLY).is_none() {
         return None;
@@ -94,22 +97,43 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Opt
 
     ctx.add_assist(AssistId("convert_to_guarded_return"), "convert to guarded return", |edit| {
        let if_indent_level = IndentLevel::from_node(&if_expr.syntax());
-       let new_block = match if_let_ident {
+       let new_block = match if_let_pat {
            None => {
                // If.
-               let early_expression = &(early_expression.to_owned() + ";");
-               let new_expr =
-                   if_indent_level.increase_indent(make::if_expression(&expr, early_expression));
-               replace(new_expr, &then_block, &parent_block, &if_expr)
+               let early_expression = &(early_expression.syntax().to_string() + ";");
+               let new_expr = if_indent_level
+                   .increase_indent(make::if_expression(&cond_expr, early_expression));
+               replace(new_expr.syntax(), &then_block, &parent_block, &if_expr)
            }
-           Some(if_let_ident) => {
+           Some((path, bound_ident)) => {
                // If-let.
-               let new_expr = if_indent_level.increase_indent(make::let_match_early(
-                   expr,
-                   &if_let_ident,
-                   early_expression,
-               ));
-               replace(new_expr, &then_block, &parent_block, &if_expr)
+               let match_expr = {
+                   let happy_arm = make::match_arm(
+                       once(
+                           make::tuple_struct_pat(
+                               path,
+                               once(make::bind_pat(make::name("it")).into()),
+                           )
+                           .into(),
+                       ),
+                       make::expr_path(make::path_from_name_ref(make::name_ref("it"))).into(),
+                   );
+
+                   let sad_arm = make::match_arm(
+                       // FIXME: would be cool to use `None` or `Err(_)` if appropriate
+                       once(make::placeholder_pat().into()),
+                       early_expression.into(),
+                   );
+
+                   make::expr_match(cond_expr, make::match_arm_list(vec![happy_arm, sad_arm]))
+               };
+
+               let let_stmt = make::let_stmt(
+                   make::bind_pat(make::name(&bound_ident.syntax().to_string())).into(),
+                   Some(match_expr.into()),
+               );
+               let let_stmt = if_indent_level.increase_indent(let_stmt);
+               replace(let_stmt.syntax(), &then_block, &parent_block, &if_expr)
            }
        };
        edit.target(if_expr.syntax().text_range());
@@ -117,7 +141,7 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Opt
    edit.set_cursor(cursor_position);
 
 fn replace(
-    new_expr: impl AstNode,
+    new_expr: &SyntaxNode,
     then_block: &Block,
     parent_block: &Block,
    if_expr: &ast::IfExpr,
@@ -130,7 +154,7 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Opt
    } else {
        end_of_then
    };
-   let mut then_statements = new_expr.syntax().children_with_tokens().chain(
+   let mut then_statements = new_expr.children_with_tokens().chain(
        then_block_items
            .syntax()
            .children_with_tokens()
@@ -151,9 +175,10 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Opt
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use crate::helpers::{check_assist, check_assist_not_applicable};
 
+    use super::*;
+
     #[test]
     fn convert_inside_fn() {
         check_assist(
@@ -204,7 +229,7 @@ mod tests {
            bar();
            le<|>t n = match n {
                Some(it) => it,
-               None => return,
+               _ => return,
            };
            foo(n);
@@ -215,6 +240,29 @@ mod tests {
        );
    }
 
+    #[test]
+    fn convert_if_let_result() {
+        check_assist(
+            convert_to_guarded_return,
+            r#"
+            fn main() {
+                if<|> let Ok(x) = Err(92) {
+                    foo(x);
+                }
+            }
+            "#,
+            r#"
+            fn main() {
+                le<|>t x = match Err(92) {
+                    Ok(it) => it,
+                    _ => return,
+                };
+                foo(x);
+            }
+            "#,
+        );
+    }
+
     #[test]
     fn convert_let_ok_inside_fn() {
         check_assist(
@@ -235,7 +283,7 @@ mod tests {
            bar();
            le<|>t n = match n {
                Ok(it) => it,
-               None => return,
+               _ => return,
            };
            foo(n);
@@ -293,7 +341,7 @@ mod tests {
        while true {
            le<|>t n = match n {
                Some(it) => it,
-               None => continue,
+               _ => continue,
            };
            foo(n);
            bar();
@@ -350,7 +398,7 @@ mod tests {
        loop {
            le<|>t n = match n {
                Some(it) => it,
-               None => continue,
+               _ => continue,
            };
            foo(n);
            bar();
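For orientation, this is the source-level rewrite the updated assist performs on a supported `if let`: the happy arm binds `it`, the wildcard arm produces the early `return` (or `continue` inside a loop), and the resulting `let` reuses the original binding name. The snippet below is a minimal, self-contained sketch with illustrative names (`opt`, `use_it`); it mirrors the shape of the before/after fixtures in the tests above rather than quoting them.

// Before the assist: the useful work is nested inside `if let`.
fn before(opt: Option<i32>) {
    if let Some(x) = opt {
        use_it(x);
    }
}

// After "convert to guarded return": a guarded `let ... = match ...` with an early exit,
// so the rest of the function stays at the top indentation level.
fn after(opt: Option<i32>) {
    let x = match opt {
        Some(it) => it,
        _ => return,
    };
    use_it(x);
}

fn use_it(x: i32) {
    println!("{}", x);
}

fn main() {
    before(Some(1));
    after(Some(2));
    after(None);
}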
@@ -8,6 +8,7 @@ use std::sync::Arc;
 
 use hir_def::{
     adt::VariantData,
+    body::scope::ExprScopes,
     builtin_type::BuiltinType,
     type_ref::{Mutability, TypeRef},
     CrateModuleId, LocalEnumVariantId, LocalStructFieldId, ModuleId, UnionId,
@@ -539,6 +540,7 @@ pub trait HasBody: Copy {
     fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult>;
     fn body(self, db: &impl HirDatabase) -> Arc<Body>;
     fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap>;
+    fn expr_scopes(self, db: &impl HirDatabase) -> Arc<ExprScopes>;
 }
 
 impl<T> HasBody for T
@@ -550,11 +552,15 @@ where
     }
 
     fn body(self, db: &impl HirDatabase) -> Arc<Body> {
-        db.body(self.into())
+        self.into().body(db)
     }
 
     fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
-        db.body_with_source_map(self.into()).1
+        self.into().body_source_map(db)
+    }
+
+    fn expr_scopes(self, db: &impl HirDatabase) -> Arc<ExprScopes> {
+        self.into().expr_scopes(db)
     }
 }
 
@@ -564,11 +570,15 @@ impl HasBody for DefWithBody {
     }
 
     fn body(self, db: &impl HirDatabase) -> Arc<Body> {
-        db.body(self)
+        db.body(self.into())
     }
 
     fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
-        db.body_with_source_map(self).1
+        db.body_with_source_map(self.into()).1
+    }
+
+    fn expr_scopes(self, db: &impl HirDatabase) -> Arc<ExprScopes> {
+        db.expr_scopes(self.into())
     }
 }
 
@@ -662,11 +672,11 @@ impl Function {
     }
 
     pub(crate) fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
-        db.body_with_source_map(self.into()).1
+        db.body_with_source_map(self.id.into()).1
     }
 
     pub fn body(self, db: &impl HirDatabase) -> Arc<Body> {
-        db.body(self.into())
+        db.body(self.id.into())
     }
 
     pub fn ty(self, db: &impl HirDatabase) -> Ty {
@@ -1079,7 +1089,7 @@ pub struct Local {
 
 impl Local {
     pub fn name(self, db: &impl HirDatabase) -> Option<Name> {
-        let body = db.body(self.parent);
+        let body = self.parent.body(db);
         match &body[self.pat_id] {
             Pat::Bind { name, .. } => Some(name.clone()),
             _ => None,
@@ -1091,7 +1101,7 @@ impl Local {
     }
 
     pub fn is_mut(self, db: &impl HirDatabase) -> bool {
-        let body = db.body(self.parent);
+        let body = self.parent.body(db);
         match &body[self.pat_id] {
             Pat::Bind { mode, .. } => match mode {
                 BindingAnnotation::Mutable | BindingAnnotation::RefMut => true,
@@ -1115,7 +1125,7 @@ impl Local {
     }
 
     pub fn source(self, db: &impl HirDatabase) -> Source<Either<ast::BindPat, ast::SelfParam>> {
-        let (_body, source_map) = db.body_with_source_map(self.parent);
+        let source_map = self.parent.body_source_map(db);
         let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
         let root = src.file_syntax(db);
         src.map(|ast| ast.map(|it| it.cast().unwrap().to_node(&root), |it| it.to_node(&root)))
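The hunks above reshape `HasBody`: `expr_scopes` joins the other body-related accessors, the blanket impl for types convertible into `DefWithBody` now forwards through `DefWithBody` itself, and the `Function`/`Local` call sites switch accordingly. A rough, self-contained sketch of the resulting trait shape follows; the stub types are stand-ins so the sketch compiles on its own and are not part of the commit.

use std::sync::Arc;

// Stand-in types; the real definitions live in the hir crates.
pub struct Body;
pub struct BodySourceMap;
pub struct ExprScopes;
pub struct InferenceResult;
pub trait HirDatabase {}

// Approximate shape of `HasBody` after this commit: everything an item with a body can
// hand out, including the newly added `expr_scopes`.
pub trait HasBody: Copy {
    fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult>;
    fn body(self, db: &impl HirDatabase) -> Arc<Body>;
    fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap>;
    fn expr_scopes(self, db: &impl HirDatabase) -> Arc<ExprScopes>;
}

fn main() {}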
@@ -8,7 +8,6 @@ use ra_syntax::SmolStr;
 
 use crate::{
     debug::HirDebugDatabase,
-    expr::{Body, BodySourceMap},
     generics::{GenericDef, GenericParams},
     ids,
     impl_block::{ImplBlock, ImplSourceMap, ModuleImplBlocks},
@@ -19,13 +18,14 @@ use crate::{
         InferenceResult, Namespace, Substs, Ty, TypableDef, TypeCtor,
     },
     type_alias::TypeAliasData,
-    Const, ConstData, Crate, DefWithBody, ExprScopes, FnData, Function, Module, Static,
-    StructField, Trait, TypeAlias,
+    Const, ConstData, Crate, DefWithBody, FnData, Function, Module, Static, StructField, Trait,
+    TypeAlias,
 };
 
 pub use hir_def::db::{
-    CrateDefMapQuery, DefDatabase2, DefDatabase2Storage, EnumDataQuery, InternDatabase,
-    InternDatabaseStorage, RawItemsQuery, RawItemsWithSourceMapQuery, StructDataQuery,
+    BodyQuery, BodyWithSourceMapQuery, CrateDefMapQuery, DefDatabase2, DefDatabase2Storage,
+    EnumDataQuery, ExprScopesQuery, InternDatabase, InternDatabaseStorage, RawItemsQuery,
+    RawItemsWithSourceMapQuery, StructDataQuery,
 };
 pub use hir_expand::db::{
     AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,
@@ -85,9 +85,6 @@ pub trait DefDatabase: HirDebugDatabase + DefDatabase2 {
 #[salsa::query_group(HirDatabaseStorage)]
 #[salsa::requires(salsa::Database)]
 pub trait HirDatabase: DefDatabase + AstDatabase {
-    #[salsa::invoke(ExprScopes::expr_scopes_query)]
-    fn expr_scopes(&self, def: DefWithBody) -> Arc<ExprScopes>;
-
     #[salsa::invoke(crate::ty::infer_query)]
     fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>;
 
@@ -113,12 +110,6 @@ pub trait HirDatabase: DefDatabase + AstDatabase {
     #[salsa::invoke(crate::ty::generic_defaults_query)]
     fn generic_defaults(&self, def: GenericDef) -> Substs;
 
-    #[salsa::invoke(crate::expr::body_with_source_map_query)]
-    fn body_with_source_map(&self, def: DefWithBody) -> (Arc<Body>, Arc<BodySourceMap>);
-
-    #[salsa::invoke(crate::expr::body_query)]
-    fn body(&self, def: DefWithBody) -> Arc<Body>;
-
     #[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)]
     fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>;
 
@@ -1,74 +1,233 @@
 //! FIXME: write short doc here
 
-pub(crate) mod scope;
 pub(crate) mod validation;
 
 use std::sync::Arc;
 
-use ra_syntax::{ast, AstPtr};
+use ra_syntax::AstPtr;
 
-use crate::{db::HirDatabase, DefWithBody, HasSource, Resolver};
+use crate::{db::HirDatabase, DefWithBody, HasBody, Resolver};
 
-pub use self::scope::ExprScopes;
-
 pub use hir_def::{
-    body::{Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource},
+    body::{
+        scope::{ExprScopes, ScopeEntry, ScopeId},
+        Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource,
+    },
     expr::{
         ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
         MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
     },
 };
 
-pub(crate) fn body_with_source_map_query(
-    db: &impl HirDatabase,
-    def: DefWithBody,
-) -> (Arc<Body>, Arc<BodySourceMap>) {
-    let mut params = None;
-
-    let (file_id, body) = match def {
-        DefWithBody::Function(f) => {
-            let src = f.source(db);
-            params = src.ast.param_list();
-            (src.file_id, src.ast.body().map(ast::Expr::from))
-        }
-        DefWithBody::Const(c) => {
-            let src = c.source(db);
-            (src.file_id, src.ast.body())
-        }
-        DefWithBody::Static(s) => {
-            let src = s.source(db);
-            (src.file_id, src.ast.body())
-        }
-    };
-    let resolver = hir_def::body::MacroResolver::new(db, def.module(db).id);
-    let (body, source_map) = Body::new(db, resolver, file_id, params, body);
-    (Arc::new(body), Arc::new(source_map))
-}
-
-pub(crate) fn body_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<Body> {
-    db.body_with_source_map(def).0
-}
-
 // needs arbitrary_self_types to be a method... or maybe move to the def?
 pub(crate) fn resolver_for_expr(
     db: &impl HirDatabase,
     owner: DefWithBody,
     expr_id: ExprId,
 ) -> Resolver {
-    let scopes = db.expr_scopes(owner);
+    let scopes = owner.expr_scopes(db);
     resolver_for_scope(db, owner, scopes.scope_for(expr_id))
 }
 
 pub(crate) fn resolver_for_scope(
     db: &impl HirDatabase,
     owner: DefWithBody,
-    scope_id: Option<scope::ScopeId>,
+    scope_id: Option<ScopeId>,
 ) -> Resolver {
     let mut r = owner.resolver(db);
-    let scopes = db.expr_scopes(owner);
+    let scopes = owner.expr_scopes(db);
     let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
     for scope in scope_chain.into_iter().rev() {
         r = r.push_expr_scope(Arc::clone(&scopes), scope);
     }
     r
 }
+
+#[cfg(test)]
+mod tests {
+    use hir_expand::Source;
+    use ra_db::{fixture::WithFixture, SourceDatabase};
+    use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
+    use test_utils::{assert_eq_text, extract_offset};
+
+    use crate::{source_binder::SourceAnalyzer, test_db::TestDB};
+
+    fn do_check(code: &str, expected: &[&str]) {
+        let (off, code) = extract_offset(code);
+        let code = {
+            let mut buf = String::new();
+            let off = u32::from(off) as usize;
+            buf.push_str(&code[..off]);
+            buf.push_str("marker");
+            buf.push_str(&code[off..]);
+            buf
+        };
+
+        let (db, file_id) = TestDB::with_single_file(&code);
+
+        let file = db.parse(file_id).ok().unwrap();
+        let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
+        let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);
+
+        let scopes = analyzer.scopes();
+        let expr_id = analyzer
+            .body_source_map()
+            .node_expr(Source { file_id: file_id.into(), ast: &marker.into() })
+            .unwrap();
+        let scope = scopes.scope_for(expr_id);
+
+        let actual = scopes
+            .scope_chain(scope)
+            .flat_map(|scope| scopes.entries(scope))
+            .map(|it| it.name().to_string())
+            .collect::<Vec<_>>()
+            .join("\n");
+        let expected = expected.join("\n");
+        assert_eq_text!(&expected, &actual);
+    }
+
+    #[test]
+    fn test_lambda_scope() {
+        do_check(
+            r"
+            fn quux(foo: i32) {
+                let f = |bar, baz: i32| {
+                    <|>
+                };
+            }",
+            &["bar", "baz", "foo"],
+        );
+    }
+
+    #[test]
+    fn test_call_scope() {
+        do_check(
+            r"
+            fn quux() {
+                f(|x| <|> );
+            }",
+            &["x"],
+        );
+    }
+
+    #[test]
+    fn test_method_call_scope() {
+        do_check(
+            r"
+            fn quux() {
+                z.f(|x| <|> );
+            }",
+            &["x"],
+        );
+    }
+
+    #[test]
+    fn test_loop_scope() {
+        do_check(
+            r"
+            fn quux() {
+                loop {
+                    let x = ();
+                    <|>
+                };
+            }",
+            &["x"],
+        );
+    }
+
+    #[test]
+    fn test_match() {
+        do_check(
+            r"
+            fn quux() {
+                match () {
+                    Some(x) => {
+                        <|>
+                    }
+                };
+            }",
+            &["x"],
+        );
+    }
+
+    #[test]
+    fn test_shadow_variable() {
+        do_check(
+            r"
+            fn foo(x: String) {
+                let x : &str = &x<|>;
+            }",
+            &["x"],
+        );
+    }
+
+    fn do_check_local_name(code: &str, expected_offset: u32) {
+        let (off, code) = extract_offset(code);
+
+        let (db, file_id) = TestDB::with_single_file(&code);
+        let file = db.parse(file_id).ok().unwrap();
+        let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
+            .expect("failed to find a name at the target offset");
+        let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
+        let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);
+
+        let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap();
+        let local_name =
+            local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
+        assert_eq!(local_name.range(), expected_name.syntax().text_range());
+    }
+
+    #[test]
+    fn test_resolve_local_name() {
+        do_check_local_name(
+            r#"
+            fn foo(x: i32, y: u32) {
+                {
+                    let z = x * 2;
+                }
+                {
+                    let t = x<|> * 3;
+                }
+            }"#,
+            21,
+        );
+    }
+
+    #[test]
+    fn test_resolve_local_name_declaration() {
+        do_check_local_name(
+            r#"
+            fn foo(x: String) {
+                let x : &str = &x<|>;
+            }"#,
+            21,
+        );
+    }
+
+    #[test]
+    fn test_resolve_local_name_shadow() {
+        do_check_local_name(
+            r"
+            fn foo(x: String) {
+                let x : &str = &x;
+                x<|>
+            }
+            ",
+            53,
+        );
+    }
+
+    #[test]
+    fn ref_patterns_contribute_bindings() {
+        do_check_local_name(
+            r"
+            fn foo() {
+                if let Some(&from) = bar() {
+                    from<|>;
+                }
+            }
+            ",
+            53,
+        );
+    }
+}
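With the re-export above, `ExprScopes`, `ScopeEntry` and `ScopeId` now come from `hir_def::body::scope`, and the old `hir`-side scope module is deleted in the next hunk. The core idea is unchanged: scopes form a parent-linked tree, and name lookup walks from the innermost scope outward via `std::iter::successors`. Below is a tiny, self-contained model of that walk; the types are simplified stand-ins, not the real arena-backed ones.

// Minimal model of the scope-chain walk used by ExprScopes. The real code keys scopes by
// arena ids and maps ExprId -> ScopeId; a Vec of parent links is enough to show how
// `scope_chain` yields the innermost scope first and the root last.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ScopeId(usize);

struct Scopes {
    // parents[i] is the parent of ScopeId(i); the root has no parent.
    parents: Vec<Option<ScopeId>>,
}

impl Scopes {
    fn scope_chain(&self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + '_ {
        std::iter::successors(scope, move |&s| self.parents[s.0])
    }
}

fn main() {
    // root(0) <- function body(1) <- closure body(2)
    let scopes = Scopes { parents: vec![None, Some(ScopeId(0)), Some(ScopeId(1))] };
    let chain: Vec<_> = scopes.scope_chain(Some(ScopeId(2))).collect();
    assert_eq!(chain, [ScopeId(2), ScopeId(1), ScopeId(0)]);
    println!("{:?}", chain);
}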
@@ -1,354 +0,0 @@
//! FIXME: write short doc here

use std::sync::Arc;

use ra_arena::{impl_arena_id, Arena, RawId};
use rustc_hash::FxHashMap;

use crate::{
    db::HirDatabase,
    expr::{Body, Expr, ExprId, Pat, PatId, Statement},
    DefWithBody, Name,
};

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ScopeId(RawId);
impl_arena_id!(ScopeId);

#[derive(Debug, PartialEq, Eq)]
pub struct ExprScopes {
    pub(crate) body: Arc<Body>,
    scopes: Arena<ScopeId, ScopeData>,
    scope_by_expr: FxHashMap<ExprId, ScopeId>,
}

#[derive(Debug, PartialEq, Eq)]
pub(crate) struct ScopeEntry {
    name: Name,
    pat: PatId,
}

impl ScopeEntry {
    pub(crate) fn name(&self) -> &Name {
        &self.name
    }

    pub(crate) fn pat(&self) -> PatId {
        self.pat
    }
}

#[derive(Debug, PartialEq, Eq)]
pub(crate) struct ScopeData {
    parent: Option<ScopeId>,
    entries: Vec<ScopeEntry>,
}

impl ExprScopes {
    pub(crate) fn expr_scopes_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<ExprScopes> {
        let body = db.body(def);
        let res = ExprScopes::new(body);
        Arc::new(res)
    }

    fn new(body: Arc<Body>) -> ExprScopes {
        let mut scopes = ExprScopes {
            body: body.clone(),
            scopes: Arena::default(),
            scope_by_expr: FxHashMap::default(),
        };
        let root = scopes.root_scope();
        scopes.add_params_bindings(root, body.params());
        compute_expr_scopes(body.body_expr(), &body, &mut scopes, root);
        scopes
    }

    pub(crate) fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
        &self.scopes[scope].entries
    }

    pub(crate) fn scope_chain(&self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + '_ {
        std::iter::successors(scope, move |&scope| self.scopes[scope].parent)
    }

    pub(crate) fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
        self.scope_by_expr.get(&expr).copied()
    }

    pub(crate) fn scope_by_expr(&self) -> &FxHashMap<ExprId, ScopeId> {
        &self.scope_by_expr
    }

    fn root_scope(&mut self) -> ScopeId {
        self.scopes.alloc(ScopeData { parent: None, entries: vec![] })
    }

    fn new_scope(&mut self, parent: ScopeId) -> ScopeId {
        self.scopes.alloc(ScopeData { parent: Some(parent), entries: vec![] })
    }

    fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) {
        match &body[pat] {
            Pat::Bind { name, .. } => {
                // bind can have a sub pattern, but it's actually not allowed
                // to bind to things in there
                let entry = ScopeEntry { name: name.clone(), pat };
                self.scopes[scope].entries.push(entry)
            }
            p => p.walk_child_pats(|pat| self.add_bindings(body, scope, pat)),
        }
    }

    fn add_params_bindings(&mut self, scope: ScopeId, params: &[PatId]) {
        let body = Arc::clone(&self.body);
        params.iter().for_each(|pat| self.add_bindings(&body, scope, *pat));
    }

    fn set_scope(&mut self, node: ExprId, scope: ScopeId) {
        self.scope_by_expr.insert(node, scope);
    }
}

fn compute_block_scopes(
    statements: &[Statement],
    tail: Option<ExprId>,
    body: &Body,
    scopes: &mut ExprScopes,
    mut scope: ScopeId,
) {
    for stmt in statements {
        match stmt {
            Statement::Let { pat, initializer, .. } => {
                if let Some(expr) = initializer {
                    scopes.set_scope(*expr, scope);
                    compute_expr_scopes(*expr, body, scopes, scope);
                }
                scope = scopes.new_scope(scope);
                scopes.add_bindings(body, scope, *pat);
            }
            Statement::Expr(expr) => {
                scopes.set_scope(*expr, scope);
                compute_expr_scopes(*expr, body, scopes, scope);
            }
        }
    }
    if let Some(expr) = tail {
        compute_expr_scopes(expr, body, scopes, scope);
    }
}

fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: ScopeId) {
    scopes.set_scope(expr, scope);
    match &body[expr] {
        Expr::Block { statements, tail } => {
            compute_block_scopes(&statements, *tail, body, scopes, scope);
        }
        Expr::For { iterable, pat, body: body_expr } => {
            compute_expr_scopes(*iterable, body, scopes, scope);
            let scope = scopes.new_scope(scope);
            scopes.add_bindings(body, scope, *pat);
            compute_expr_scopes(*body_expr, body, scopes, scope);
        }
        Expr::Lambda { args, body: body_expr, .. } => {
            let scope = scopes.new_scope(scope);
            scopes.add_params_bindings(scope, &args);
            compute_expr_scopes(*body_expr, body, scopes, scope);
        }
        Expr::Match { expr, arms } => {
            compute_expr_scopes(*expr, body, scopes, scope);
            for arm in arms {
                let scope = scopes.new_scope(scope);
                for pat in &arm.pats {
                    scopes.add_bindings(body, scope, *pat);
                }
                scopes.set_scope(arm.expr, scope);
                compute_expr_scopes(arm.expr, body, scopes, scope);
            }
        }
        e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)),
    };
}

#[cfg(test)]
mod tests {
    use ra_db::{fixture::WithFixture, SourceDatabase};
    use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
    use test_utils::{assert_eq_text, extract_offset};

    use crate::{source_binder::SourceAnalyzer, test_db::TestDB};

    fn do_check(code: &str, expected: &[&str]) {
        let (off, code) = extract_offset(code);
        let code = {
            let mut buf = String::new();
            let off = u32::from(off) as usize;
            buf.push_str(&code[..off]);
            buf.push_str("marker");
            buf.push_str(&code[off..]);
            buf
        };

        let (db, file_id) = TestDB::with_single_file(&code);
        let file = db.parse(file_id).ok().unwrap();
        let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
        let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);

        let scopes = analyzer.scopes();
        let expr_id = analyzer.body_source_map().node_expr(&marker.into()).unwrap();
        let scope = scopes.scope_for(expr_id);

        let actual = scopes
            .scope_chain(scope)
            .flat_map(|scope| scopes.entries(scope))
            .map(|it| it.name().to_string())
            .collect::<Vec<_>>()
            .join("\n");
        let expected = expected.join("\n");
        assert_eq_text!(&expected, &actual);
    }

    #[test]
    fn test_lambda_scope() {
        do_check(
            r"
fn quux(foo: i32) {
    let f = |bar, baz: i32| {
        <|>
    };
}",
            &["bar", "baz", "foo"],
        );
    }

    #[test]
    fn test_call_scope() {
        do_check(
            r"
fn quux() {
    f(|x| <|> );
}",
            &["x"],
        );
    }

    #[test]
    fn test_method_call_scope() {
        do_check(
            r"
fn quux() {
    z.f(|x| <|> );
}",
            &["x"],
        );
    }

    #[test]
    fn test_loop_scope() {
        do_check(
            r"
fn quux() {
    loop {
        let x = ();
        <|>
    };
}",
            &["x"],
        );
    }

    #[test]
    fn test_match() {
        do_check(
            r"
fn quux() {
    match () {
        Some(x) => {
            <|>
        }
    };
}",
            &["x"],
        );
    }

    #[test]
    fn test_shadow_variable() {
        do_check(
            r"
fn foo(x: String) {
    let x : &str = &x<|>;
}",
            &["x"],
        );
    }

    fn do_check_local_name(code: &str, expected_offset: u32) {
        let (off, code) = extract_offset(code);

        let (db, file_id) = TestDB::with_single_file(&code);
        let file = db.parse(file_id).ok().unwrap();
        let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
            .expect("failed to find a name at the target offset");
        let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
        let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);

        let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap();
        let local_name =
            local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
        assert_eq!(local_name.range(), expected_name.syntax().text_range());
    }

    #[test]
    fn test_resolve_local_name() {
        do_check_local_name(
            r#"
fn foo(x: i32, y: u32) {
    {
        let z = x * 2;
    }
    {
        let t = x<|> * 3;
    }
}"#,
            21,
        );
    }

    #[test]
    fn test_resolve_local_name_declaration() {
        do_check_local_name(
            r#"
fn foo(x: String) {
    let x : &str = &x<|>;
}"#,
            21,
        );
    }

    #[test]
    fn test_resolve_local_name_shadow() {
        do_check_local_name(
            r"
fn foo(x: String) {
    let x : &str = &x;
    x<|>
}
",
            53,
        );
    }

    #[test]
    fn ref_patterns_contribute_bindings() {
        do_check_local_name(
            r"
fn foo() {
    if let Some(&from) = bar() {
        from<|>;
    }
}
",
            53,
        );
    }
}
@@ -3,9 +3,9 @@
//! It's unclear if we need this long-term, but it's definitelly useful while we
//! are splitting the hir.

-use hir_def::{AdtId, EnumVariantId, ModuleDefId};
+use hir_def::{AdtId, DefWithBodyId, EnumVariantId, ModuleDefId};

-use crate::{Adt, EnumVariant, ModuleDef};
+use crate::{Adt, DefWithBody, EnumVariant, ModuleDef};

macro_rules! from_id {
    ($(($id:path, $ty:path)),*) => {$(

@@ -61,3 +61,13 @@ impl From<ModuleDefId> for ModuleDef {
        }
    }
}
+
+impl From<DefWithBody> for DefWithBodyId {
+    fn from(def: DefWithBody) -> Self {
+        match def {
+            DefWithBody::Function(it) => DefWithBodyId::FunctionId(it.id),
+            DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id),
+            DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id),
+        }
+    }
+}

@@ -10,7 +10,7 @@ use ra_syntax::{
use crate::{
    db::{AstDatabase, DefDatabase, HirDatabase},
    ids::{AstItemDef, LocationCtx},
-    AstId, Const, Crate, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasSource,
+    AstId, Const, Crate, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasBody, HasSource,
    ImplBlock, Local, Module, ModuleSource, Source, Static, Struct, StructField, Trait, TypeAlias,
    Union, VariantDef,
};

@@ -144,8 +144,9 @@ impl Local {
            };
            Some(res)
        })?;
-        let (_body, source_map) = db.body_with_source_map(parent);
-        let pat_id = source_map.node_pat(&src.ast.into())?;
+        let source_map = parent.body_source_map(db);
+        let src = src.map(ast::Pat::from);
+        let pat_id = source_map.node_pat(src.as_ref())?;
        Some(Local { parent, pat_id })
    }
}

@@ -77,9 +77,7 @@ pub use crate::{
    source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
    ty::{
        display::HirDisplay,
-        primitive::{
-            FloatBitness, FloatTy, IntBitness, IntTy, Signedness, UncertainFloatTy, UncertainIntTy,
-        },
+        primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness, Uncertain},
        ApplicationTy, CallableDef, Substs, TraitRef, Ty, TypeCtor, TypeWalk,
    },
};

@@ -13,10 +13,7 @@ use rustc_hash::FxHashSet;
use crate::{
    code_model::Crate,
    db::{DefDatabase, HirDatabase},
-    expr::{
-        scope::{ExprScopes, ScopeId},
-        PatId,
-    },
+    expr::{ExprScopes, PatId, ScopeId},
    generics::GenericParams,
    impl_block::ImplBlock,
    Adt, Const, Enum, EnumVariant, Function, MacroDef, ModuleDef, PerNs, Static, Struct, Trait,

@@ -7,8 +7,11 @@
//! purely for "IDE needs".
use std::sync::Arc;

-use hir_def::path::known;
-use hir_expand::name::AsName;
+use hir_def::{
+    expr::{ExprId, PatId},
+    path::known,
+};
+use hir_expand::{name::AsName, Source};
use ra_db::FileId;
use ra_syntax::{
    ast::{self, AstNode},

@@ -20,11 +23,7 @@ use rustc_hash::FxHashSet;

use crate::{
    db::HirDatabase,
-    expr::{
-        self,
-        scope::{ExprScopes, ScopeId},
-        BodySourceMap,
-    },
+    expr::{self, BodySourceMap, ExprScopes, ScopeId},
    ids::LocationCtx,
    resolve::{ScopeDef, TypeNs, ValueNs},
    ty::method_resolution::{self, implements_trait},
@@ -93,6 +92,8 @@ fn def_with_body_from_child_node(
/// original source files. It should not be used inside the HIR itself.
#[derive(Debug)]
pub struct SourceAnalyzer {
+    // FIXME: this doesn't handle macros at all
+    file_id: FileId,
    resolver: Resolver,
    body_owner: Option<DefWithBody>,
    body_source_map: Option<Arc<BodySourceMap>>,

@@ -145,9 +146,9 @@ impl SourceAnalyzer {
        let def_with_body = def_with_body_from_child_node(db, file_id, node);
        if let Some(def) = def_with_body {
            let source_map = def.body_source_map(db);
-            let scopes = db.expr_scopes(def);
+            let scopes = def.expr_scopes(db);
            let scope = match offset {
-                None => scope_for(&scopes, &source_map, &node),
+                None => scope_for(&scopes, &source_map, file_id.into(), &node),
                Some(offset) => scope_for_offset(&scopes, &source_map, file_id.into(), offset),
            };
            let resolver = expr::resolver_for_scope(db, def, scope);

@@ -157,6 +158,7 @@ impl SourceAnalyzer {
                body_source_map: Some(source_map),
                infer: Some(def.infer(db)),
                scopes: Some(scopes),
+                file_id,
            }
        } else {
            SourceAnalyzer {

@@ -168,17 +170,28 @@ impl SourceAnalyzer {
                body_source_map: None,
                infer: None,
                scopes: None,
+                file_id,
            }
        }
    }

+    fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> {
+        let src = Source { file_id: self.file_id.into(), ast: expr };
+        self.body_source_map.as_ref()?.node_expr(src)
+    }
+
+    fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
+        let src = Source { file_id: self.file_id.into(), ast: pat };
+        self.body_source_map.as_ref()?.node_pat(src)
+    }
+
    pub fn type_of(&self, _db: &impl HirDatabase, expr: &ast::Expr) -> Option<crate::Ty> {
-        let expr_id = self.body_source_map.as_ref()?.node_expr(expr)?;
+        let expr_id = self.expr_id(expr)?;
        Some(self.infer.as_ref()?[expr_id].clone())
    }

    pub fn type_of_pat(&self, _db: &impl HirDatabase, pat: &ast::Pat) -> Option<crate::Ty> {
-        let pat_id = self.body_source_map.as_ref()?.node_pat(pat)?;
+        let pat_id = self.pat_id(pat)?;
        Some(self.infer.as_ref()?[pat_id].clone())
    }

@@ -191,22 +204,22 @@ impl SourceAnalyzer {
    }

    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
-        let expr_id = self.body_source_map.as_ref()?.node_expr(&call.clone().into())?;
+        let expr_id = self.expr_id(&call.clone().into())?;
        self.infer.as_ref()?.method_resolution(expr_id)
    }

    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
-        let expr_id = self.body_source_map.as_ref()?.node_expr(&field.clone().into())?;
+        let expr_id = self.expr_id(&field.clone().into())?;
        self.infer.as_ref()?.field_resolution(expr_id)
    }

    pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<crate::VariantDef> {
-        let expr_id = self.body_source_map.as_ref()?.node_expr(&record_lit.clone().into())?;
+        let expr_id = self.expr_id(&record_lit.clone().into())?;
        self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
    }

    pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<crate::VariantDef> {
-        let pat_id = self.body_source_map.as_ref()?.node_pat(&record_pat.clone().into())?;
+        let pat_id = self.pat_id(&record_pat.clone().into())?;
        self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
    }

@@ -264,13 +277,13 @@ impl SourceAnalyzer {

    pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> {
        if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
-            let expr_id = self.body_source_map.as_ref()?.node_expr(&path_expr.into())?;
+            let expr_id = self.expr_id(&path_expr.into())?;
            if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
                return Some(PathResolution::AssocItem(assoc));
            }
        }
        if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) {
-            let pat_id = self.body_source_map.as_ref()?.node_pat(&path_pat.into())?;
+            let pat_id = self.pat_id(&path_pat.into())?;
            if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
                return Some(PathResolution::AssocItem(assoc));
            }

@@ -285,7 +298,7 @@ impl SourceAnalyzer {
        let name = name_ref.as_name();
        let source_map = self.body_source_map.as_ref()?;
        let scopes = self.scopes.as_ref()?;
-        let scope = scope_for(scopes, source_map, name_ref.syntax());
+        let scope = scope_for(scopes, source_map, self.file_id.into(), name_ref.syntax());
        let ret = scopes
            .scope_chain(scope)
            .flat_map(|scope| scopes.entries(scope).iter())

@@ -418,11 +431,12 @@ impl SourceAnalyzer {
fn scope_for(
    scopes: &ExprScopes,
    source_map: &BodySourceMap,
+    file_id: HirFileId,
    node: &SyntaxNode,
) -> Option<ScopeId> {
    node.ancestors()
        .filter_map(ast::Expr::cast)
-        .filter_map(|it| source_map.node_expr(&it))
+        .filter_map(|it| source_map.node_expr(Source { file_id, ast: &it }))
        .find_map(|it| scopes.scope_for(it))
}

@@ -21,7 +21,7 @@ use crate::{
    expr::ExprId,
    generics::{GenericParams, HasGenericParams},
    util::make_mut_slice,
-    Adt, Crate, DefWithBody, Mutability, Name, Trait, TypeAlias,
+    Adt, Crate, DefWithBody, FloatTy, IntTy, Mutability, Name, Trait, TypeAlias, Uncertain,
};
use display::{HirDisplay, HirFormatter};

@@ -47,10 +47,10 @@ pub enum TypeCtor {
    Char,

    /// A primitive integer type. For example, `i32`.
-    Int(primitive::UncertainIntTy),
+    Int(Uncertain<IntTy>),

    /// A primitive floating-point type. For example, `f64`.
-    Float(primitive::UncertainFloatTy),
+    Float(Uncertain<FloatTy>),

    /// Structures, enumerations and unions.
    Adt(Adt),

@@ -31,10 +31,10 @@ use ra_prof::profile;
use test_utils::tested_by;

use super::{
-    lower, primitive,
+    lower,
    traits::{Guidance, Obligation, ProjectionPredicate, Solution},
    ApplicationTy, InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypableDef,
-    TypeCtor, TypeWalk,
+    TypeCtor, TypeWalk, Uncertain,
};
use crate::{
    adt::VariantDef,

@@ -43,7 +43,8 @@ use crate::{
    expr::{BindingAnnotation, Body, ExprId, PatId},
    resolve::{Resolver, TypeNs},
    ty::infer::diagnostics::InferenceDiagnostic,
-    Adt, AssocItem, ConstData, DefWithBody, FnData, Function, Path, StructField,
+    Adt, AssocItem, ConstData, DefWithBody, FloatTy, FnData, Function, HasBody, IntTy, Path,
+    StructField,
};

macro_rules! ty_app {

@@ -214,7 +215,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
            coerce_unsized_map: Self::init_coerce_unsized_map(db, &resolver),
            db,
            owner,
-            body: db.body(owner),
+            body: owner.body(db),
            resolver,
        }
    }

@@ -358,14 +359,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
    fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
        match ty {
            Ty::Unknown => self.new_type_var(),
-            Ty::Apply(ApplicationTy {
-                ctor: TypeCtor::Int(primitive::UncertainIntTy::Unknown),
-                ..
-            }) => self.new_integer_var(),
-            Ty::Apply(ApplicationTy {
-                ctor: TypeCtor::Float(primitive::UncertainFloatTy::Unknown),
-                ..
-            }) => self.new_float_var(),
+            Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(Uncertain::Unknown), .. }) => {
+                self.new_integer_var()
+            }
+            Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(Uncertain::Unknown), .. }) => {
+                self.new_float_var()
+            }
            _ => ty,
        }
    }

@@ -684,12 +683,8 @@ impl InferTy {
    fn fallback_value(self) -> Ty {
        match self {
            InferTy::TypeVar(..) => Ty::Unknown,
-            InferTy::IntVar(..) => {
-                Ty::simple(TypeCtor::Int(primitive::UncertainIntTy::Known(primitive::IntTy::i32())))
-            }
-            InferTy::FloatVar(..) => Ty::simple(TypeCtor::Float(
-                primitive::UncertainFloatTy::Known(primitive::FloatTy::f64()),
-            )),
+            InferTy::IntVar(..) => Ty::simple(TypeCtor::Int(Uncertain::Known(IntTy::i32()))),
+            InferTy::FloatVar(..) => Ty::simple(TypeCtor::Float(Uncertain::Known(FloatTy::f64()))),
            InferTy::MaybeNeverTypeVar(..) => Ty::simple(TypeCtor::Never),
        }
    }

|
||||||
use std::iter::{repeat, repeat_with};
|
use std::iter::{repeat, repeat_with};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use hir_def::path::{GenericArg, GenericArgs};
|
use hir_def::{
|
||||||
|
builtin_type::Signedness,
|
||||||
|
path::{GenericArg, GenericArgs},
|
||||||
|
};
|
||||||
use hir_expand::name;
|
use hir_expand::name;
|
||||||
|
|
||||||
use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch};
|
use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch};
|
||||||
|
@ -12,8 +15,9 @@ use crate::{
|
||||||
expr::{self, Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
|
expr::{self, Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
|
||||||
generics::{GenericParams, HasGenericParams},
|
generics::{GenericParams, HasGenericParams},
|
||||||
ty::{
|
ty::{
|
||||||
autoderef, method_resolution, op, primitive, CallableDef, InferTy, Mutability, Namespace,
|
autoderef, method_resolution, op, CallableDef, InferTy, IntTy, Mutability, Namespace,
|
||||||
Obligation, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk,
|
Obligation, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk,
|
||||||
|
Uncertain,
|
||||||
},
|
},
|
||||||
Adt, Name,
|
Adt, Name,
|
||||||
};
|
};
|
||||||
|
@ -337,13 +341,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
|
||||||
UnaryOp::Neg => {
|
UnaryOp::Neg => {
|
||||||
match &inner_ty {
|
match &inner_ty {
|
||||||
Ty::Apply(a_ty) => match a_ty.ctor {
|
Ty::Apply(a_ty) => match a_ty.ctor {
|
||||||
TypeCtor::Int(primitive::UncertainIntTy::Unknown)
|
TypeCtor::Int(Uncertain::Unknown)
|
||||||
| TypeCtor::Int(primitive::UncertainIntTy::Known(
|
| TypeCtor::Int(Uncertain::Known(IntTy {
|
||||||
primitive::IntTy {
|
signedness: Signedness::Signed,
|
||||||
signedness: primitive::Signedness::Signed,
|
|
||||||
..
|
..
|
||||||
},
|
}))
|
||||||
))
|
|
||||||
| TypeCtor::Float(..) => inner_ty,
|
| TypeCtor::Float(..) => inner_ty,
|
||||||
_ => Ty::Unknown,
|
_ => Ty::Unknown,
|
||||||
},
|
},
|
||||||
|
@ -428,9 +430,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
|
||||||
);
|
);
|
||||||
self.infer_expr(
|
self.infer_expr(
|
||||||
*repeat,
|
*repeat,
|
||||||
&Expectation::has_type(Ty::simple(TypeCtor::Int(
|
&Expectation::has_type(Ty::simple(TypeCtor::Int(Uncertain::Known(
|
||||||
primitive::UncertainIntTy::Known(primitive::IntTy::usize()),
|
IntTy::usize(),
|
||||||
))),
|
)))),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -443,9 +445,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
|
||||||
Ty::apply_one(TypeCtor::Ref(Mutability::Shared), Ty::simple(TypeCtor::Str))
|
Ty::apply_one(TypeCtor::Ref(Mutability::Shared), Ty::simple(TypeCtor::Str))
|
||||||
}
|
}
|
||||||
Literal::ByteString(..) => {
|
Literal::ByteString(..) => {
|
||||||
let byte_type = Ty::simple(TypeCtor::Int(primitive::UncertainIntTy::Known(
|
let byte_type = Ty::simple(TypeCtor::Int(Uncertain::Known(IntTy::u8())));
|
||||||
primitive::IntTy::u8(),
|
|
||||||
)));
|
|
||||||
let slice_type = Ty::apply_one(TypeCtor::Slice, byte_type);
|
let slice_type = Ty::apply_one(TypeCtor::Slice, byte_type);
|
||||||
Ty::apply_one(TypeCtor::Ref(Mutability::Shared), slice_type)
|
Ty::apply_one(TypeCtor::Ref(Mutability::Shared), slice_type)
|
||||||
}
|
}
|
||||||
|
|
|
@ -25,7 +25,7 @@ use crate::{
|
||||||
generics::{GenericDef, WherePredicate},
|
generics::{GenericDef, WherePredicate},
|
||||||
resolve::{Resolver, TypeNs},
|
resolve::{Resolver, TypeNs},
|
||||||
ty::{
|
ty::{
|
||||||
primitive::{FloatTy, IntTy, UncertainFloatTy, UncertainIntTy},
|
primitive::{FloatTy, IntTy, Uncertain},
|
||||||
Adt,
|
Adt,
|
||||||
},
|
},
|
||||||
util::make_mut_slice,
|
util::make_mut_slice,
|
||||||
|
@ -674,20 +674,20 @@ impl From<BuiltinFloat> for FloatTy {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Option<BuiltinInt>> for UncertainIntTy {
|
impl From<Option<BuiltinInt>> for Uncertain<IntTy> {
|
||||||
fn from(t: Option<BuiltinInt>) -> Self {
|
fn from(t: Option<BuiltinInt>) -> Self {
|
||||||
match t {
|
match t {
|
||||||
None => UncertainIntTy::Unknown,
|
None => Uncertain::Unknown,
|
||||||
Some(t) => UncertainIntTy::Known(t.into()),
|
Some(t) => Uncertain::Known(t.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Option<BuiltinFloat>> for UncertainFloatTy {
|
impl From<Option<BuiltinFloat>> for Uncertain<FloatTy> {
|
||||||
fn from(t: Option<BuiltinFloat>) -> Self {
|
fn from(t: Option<BuiltinFloat>) -> Self {
|
||||||
match t {
|
match t {
|
||||||
None => UncertainFloatTy::Unknown,
|
None => Uncertain::Unknown,
|
||||||
Some(t) => UncertainFloatTy::Known(t.into()),
|
Some(t) => Uncertain::Known(t.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -8,16 +8,17 @@ use arrayvec::ArrayVec;
|
||||||
use hir_def::CrateModuleId;
|
use hir_def::CrateModuleId;
|
||||||
use rustc_hash::FxHashMap;
|
use rustc_hash::FxHashMap;
|
||||||
|
|
||||||
use super::{autoderef, lower, Canonical, InEnvironment, TraitEnvironment, TraitRef};
|
|
||||||
use crate::{
|
use crate::{
|
||||||
db::HirDatabase,
|
db::HirDatabase,
|
||||||
impl_block::{ImplBlock, ImplId},
|
impl_block::{ImplBlock, ImplId},
|
||||||
resolve::Resolver,
|
resolve::Resolver,
|
||||||
ty::primitive::{FloatBitness, UncertainFloatTy, UncertainIntTy},
|
ty::primitive::{FloatBitness, Uncertain},
|
||||||
ty::{Ty, TypeCtor},
|
ty::{Ty, TypeCtor},
|
||||||
AssocItem, Crate, Function, Module, Mutability, Name, Trait,
|
AssocItem, Crate, Function, Module, Mutability, Name, Trait,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use super::{autoderef, lower, Canonical, InEnvironment, TraitEnvironment, TraitRef};
|
||||||
|
|
||||||
/// This is used as a key for indexing impls.
|
/// This is used as a key for indexing impls.
|
||||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||||
pub enum TyFingerprint {
|
pub enum TyFingerprint {
|
||||||
|
@ -140,14 +141,12 @@ fn def_crates(db: &impl HirDatabase, cur_crate: Crate, ty: &Ty) -> Option<ArrayV
|
||||||
TypeCtor::Adt(def_id) => Some(std::iter::once(def_id.krate(db)?).collect()),
|
TypeCtor::Adt(def_id) => Some(std::iter::once(def_id.krate(db)?).collect()),
|
||||||
TypeCtor::Bool => lang_item_crate!(db, cur_crate, "bool"),
|
TypeCtor::Bool => lang_item_crate!(db, cur_crate, "bool"),
|
||||||
TypeCtor::Char => lang_item_crate!(db, cur_crate, "char"),
|
TypeCtor::Char => lang_item_crate!(db, cur_crate, "char"),
|
||||||
TypeCtor::Float(UncertainFloatTy::Known(f)) => match f.bitness {
|
TypeCtor::Float(Uncertain::Known(f)) => match f.bitness {
|
||||||
// There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime)
|
// There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime)
|
||||||
FloatBitness::X32 => lang_item_crate!(db, cur_crate, "f32", "f32_runtime"),
|
FloatBitness::X32 => lang_item_crate!(db, cur_crate, "f32", "f32_runtime"),
|
||||||
FloatBitness::X64 => lang_item_crate!(db, cur_crate, "f64", "f64_runtime"),
|
FloatBitness::X64 => lang_item_crate!(db, cur_crate, "f64", "f64_runtime"),
|
||||||
},
|
},
|
||||||
TypeCtor::Int(UncertainIntTy::Known(i)) => {
|
TypeCtor::Int(Uncertain::Known(i)) => lang_item_crate!(db, cur_crate, i.ty_to_string()),
|
||||||
lang_item_crate!(db, cur_crate, i.ty_to_string())
|
|
||||||
}
|
|
||||||
TypeCtor::Str => lang_item_crate!(db, cur_crate, "str_alloc", "str"),
|
TypeCtor::Str => lang_item_crate!(db, cur_crate, "str_alloc", "str"),
|
||||||
TypeCtor::Slice => lang_item_crate!(db, cur_crate, "slice_alloc", "slice"),
|
TypeCtor::Slice => lang_item_crate!(db, cur_crate, "slice_alloc", "slice"),
|
||||||
TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!(db, cur_crate, "const_ptr"),
|
TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!(db, cur_crate, "const_ptr"),
|
||||||
|
|
|
@ -4,44 +4,38 @@ use std::fmt;
|
||||||
|
|
||||||
pub use hir_def::builtin_type::{FloatBitness, IntBitness, Signedness};
|
pub use hir_def::builtin_type::{FloatBitness, IntBitness, Signedness};
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
|
#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]
|
||||||
pub enum UncertainIntTy {
|
pub enum Uncertain<T> {
|
||||||
Unknown,
|
Unknown,
|
||||||
Known(IntTy),
|
Known(T),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<IntTy> for UncertainIntTy {
|
impl From<IntTy> for Uncertain<IntTy> {
|
||||||
fn from(ty: IntTy) -> Self {
|
fn from(ty: IntTy) -> Self {
|
||||||
UncertainIntTy::Known(ty)
|
Uncertain::Known(ty)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Display for UncertainIntTy {
|
impl fmt::Display for Uncertain<IntTy> {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
match *self {
|
match *self {
|
||||||
UncertainIntTy::Unknown => write!(f, "{{integer}}"),
|
Uncertain::Unknown => write!(f, "{{integer}}"),
|
||||||
UncertainIntTy::Known(ty) => write!(f, "{}", ty),
|
Uncertain::Known(ty) => write!(f, "{}", ty),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
|
impl From<FloatTy> for Uncertain<FloatTy> {
|
||||||
pub enum UncertainFloatTy {
|
|
||||||
Unknown,
|
|
||||||
Known(FloatTy),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<FloatTy> for UncertainFloatTy {
|
|
||||||
fn from(ty: FloatTy) -> Self {
|
fn from(ty: FloatTy) -> Self {
|
||||||
UncertainFloatTy::Known(ty)
|
Uncertain::Known(ty)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Display for UncertainFloatTy {
|
impl fmt::Display for Uncertain<FloatTy> {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
match *self {
|
match *self {
|
||||||
UncertainFloatTy::Unknown => write!(f, "{{float}}"),
|
Uncertain::Unknown => write!(f, "{{float}}"),
|
||||||
UncertainFloatTy::Known(ty) => write!(f, "{}", ty),
|
Uncertain::Known(ty) => write!(f, "{}", ty),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,7 +22,7 @@ use crate::{
|
||||||
ApplicationTy, GenericPredicate, Namespace, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
|
ApplicationTy, GenericPredicate, Namespace, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
|
||||||
TypeWalk,
|
TypeWalk,
|
||||||
},
|
},
|
||||||
AssocItem, Crate, ImplBlock, Trait, TypeAlias,
|
AssocItem, Crate, HasBody, ImplBlock, Trait, TypeAlias,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// This represents a trait whose name we could not resolve.
|
/// This represents a trait whose name we could not resolve.
|
||||||
|
@ -714,7 +714,7 @@ fn closure_fn_trait_impl_datum(
|
||||||
let fn_once_trait = get_fn_trait(db, krate, super::FnTrait::FnOnce)?;
|
let fn_once_trait = get_fn_trait(db, krate, super::FnTrait::FnOnce)?;
|
||||||
let trait_ = get_fn_trait(db, krate, data.fn_trait)?; // get corresponding fn trait
|
let trait_ = get_fn_trait(db, krate, data.fn_trait)?; // get corresponding fn trait
|
||||||
|
|
||||||
let num_args: u16 = match &db.body(data.def)[data.expr] {
|
let num_args: u16 = match &data.def.body(db)[data.expr] {
|
||||||
crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
|
crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
|
||||||
_ => {
|
_ => {
|
||||||
log::warn!("closure for closure type {:?} not found", data);
|
log::warn!("closure for closure type {:?} not found", data);
|
||||||
|
|
|
@ -1,11 +1,15 @@
|
||||||
//! FIXME: write short doc here
|
//! FIXME: write short doc here
|
||||||
mod lower;
|
mod lower;
|
||||||
|
pub mod scope;
|
||||||
|
|
||||||
use std::{ops::Index, sync::Arc};
|
use std::{ops::Index, sync::Arc};
|
||||||
|
|
||||||
use hir_expand::{either::Either, HirFileId, MacroDefId, Source};
|
use hir_expand::{
|
||||||
|
either::Either, hygiene::Hygiene, AstId, HirFileId, MacroCallLoc, MacroDefId, MacroFileKind,
|
||||||
|
Source,
|
||||||
|
};
|
||||||
use ra_arena::{map::ArenaMap, Arena};
|
use ra_arena::{map::ArenaMap, Arena};
|
||||||
use ra_syntax::{ast, AstPtr};
|
use ra_syntax::{ast, AstNode, AstPtr};
|
||||||
use rustc_hash::FxHashMap;
|
use rustc_hash::FxHashMap;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
@ -13,28 +17,87 @@ use crate::{
|
||||||
expr::{Expr, ExprId, Pat, PatId},
|
expr::{Expr, ExprId, Pat, PatId},
|
||||||
nameres::CrateDefMap,
|
nameres::CrateDefMap,
|
||||||
path::Path,
|
path::Path,
|
||||||
ModuleId,
|
AstItemDef, DefWithBodyId, ModuleId,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub struct MacroResolver {
|
pub struct Expander {
|
||||||
crate_def_map: Arc<CrateDefMap>,
|
crate_def_map: Arc<CrateDefMap>,
|
||||||
|
current_file_id: HirFileId,
|
||||||
|
hygiene: Hygiene,
|
||||||
module: ModuleId,
|
module: ModuleId,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl MacroResolver {
|
impl Expander {
|
||||||
pub fn new(db: &impl DefDatabase2, module: ModuleId) -> MacroResolver {
|
pub fn new(db: &impl DefDatabase2, current_file_id: HirFileId, module: ModuleId) -> Expander {
|
||||||
MacroResolver { crate_def_map: db.crate_def_map(module.krate), module }
|
let crate_def_map = db.crate_def_map(module.krate);
|
||||||
|
let hygiene = Hygiene::new(db, current_file_id);
|
||||||
|
Expander { crate_def_map, current_file_id, hygiene, module }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn resolve_path_as_macro(
|
fn enter_expand(
|
||||||
&self,
|
&mut self,
|
||||||
db: &impl DefDatabase2,
|
db: &impl DefDatabase2,
|
||||||
path: &Path,
|
macro_call: ast::MacroCall,
|
||||||
) -> Option<MacroDefId> {
|
) -> Option<(Mark, ast::Expr)> {
|
||||||
|
let ast_id = AstId::new(
|
||||||
|
self.current_file_id,
|
||||||
|
db.ast_id_map(self.current_file_id).ast_id(¯o_call),
|
||||||
|
);
|
||||||
|
|
||||||
|
if let Some(path) = macro_call.path().and_then(|path| self.parse_path(path)) {
|
||||||
|
if let Some(def) = self.resolve_path_as_macro(db, &path) {
|
||||||
|
let call_id = db.intern_macro(MacroCallLoc { def, ast_id });
|
||||||
|
let file_id = call_id.as_file(MacroFileKind::Expr);
|
||||||
|
if let Some(node) = db.parse_or_expand(file_id) {
|
||||||
|
if let Some(expr) = ast::Expr::cast(node) {
|
||||||
|
log::debug!("macro expansion {:#?}", expr.syntax());
|
||||||
|
|
||||||
|
let mark = Mark { file_id: self.current_file_id };
|
||||||
|
self.hygiene = Hygiene::new(db, file_id);
|
||||||
|
self.current_file_id = file_id;
|
||||||
|
|
||||||
|
return Some((mark, expr));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// FIXME: Instead of just dropping the error from expansion
|
||||||
|
// report it
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn exit(&mut self, db: &impl DefDatabase2, mark: Mark) {
|
||||||
|
self.hygiene = Hygiene::new(db, mark.file_id);
|
||||||
|
self.current_file_id = mark.file_id;
|
||||||
|
std::mem::forget(mark);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn to_source<T>(&self, ast: T) -> Source<T> {
|
||||||
|
Source { file_id: self.current_file_id, ast }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
|
||||||
|
Path::from_src(path, &self.hygiene)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn resolve_path_as_macro(&self, db: &impl DefDatabase2, path: &Path) -> Option<MacroDefId> {
|
||||||
self.crate_def_map.resolve_path(db, self.module.module_id, path).0.get_macros()
|
self.crate_def_map.resolve_path(db, self.module.module_id, path).0.get_macros()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct Mark {
|
||||||
|
file_id: HirFileId,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for Mark {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
if !std::thread::panicking() {
|
||||||
|
panic!("dropped mark")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// The body of an item (function, const etc.).
|
/// The body of an item (function, const etc.).
|
||||||
#[derive(Debug, Eq, PartialEq)]
|
#[derive(Debug, Eq, PartialEq)]
|
||||||
pub struct Body {
|
pub struct Body {
|
||||||
|
@ -70,22 +133,51 @@ pub type PatSource = Source<PatPtr>;
|
||||||
/// this properly for macros.
|
/// this properly for macros.
|
||||||
#[derive(Default, Debug, Eq, PartialEq)]
|
#[derive(Default, Debug, Eq, PartialEq)]
|
||||||
pub struct BodySourceMap {
|
pub struct BodySourceMap {
|
||||||
expr_map: FxHashMap<ExprPtr, ExprId>,
|
expr_map: FxHashMap<ExprSource, ExprId>,
|
||||||
expr_map_back: ArenaMap<ExprId, ExprSource>,
|
expr_map_back: ArenaMap<ExprId, ExprSource>,
|
||||||
pat_map: FxHashMap<PatPtr, PatId>,
|
pat_map: FxHashMap<PatSource, PatId>,
|
||||||
pat_map_back: ArenaMap<PatId, PatSource>,
|
pat_map_back: ArenaMap<PatId, PatSource>,
|
||||||
field_map: FxHashMap<(ExprId, usize), AstPtr<ast::RecordField>>,
|
field_map: FxHashMap<(ExprId, usize), AstPtr<ast::RecordField>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Body {
|
impl Body {
|
||||||
pub fn new(
|
pub(crate) fn body_with_source_map_query(
|
||||||
db: &impl DefDatabase2,
|
db: &impl DefDatabase2,
|
||||||
resolver: MacroResolver,
|
def: DefWithBodyId,
|
||||||
file_id: HirFileId,
|
) -> (Arc<Body>, Arc<BodySourceMap>) {
|
||||||
|
let mut params = None;
|
||||||
|
|
||||||
|
let (file_id, module, body) = match def {
|
||||||
|
DefWithBodyId::FunctionId(f) => {
|
||||||
|
let src = f.source(db);
|
||||||
|
params = src.ast.param_list();
|
||||||
|
(src.file_id, f.module(db), src.ast.body().map(ast::Expr::from))
|
||||||
|
}
|
||||||
|
DefWithBodyId::ConstId(c) => {
|
||||||
|
let src = c.source(db);
|
||||||
|
(src.file_id, c.module(db), src.ast.body())
|
||||||
|
}
|
||||||
|
DefWithBodyId::StaticId(s) => {
|
||||||
|
let src = s.source(db);
|
||||||
|
(src.file_id, s.module(db), src.ast.body())
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let expander = Expander::new(db, file_id, module);
|
||||||
|
let (body, source_map) = Body::new(db, expander, params, body);
|
||||||
|
(Arc::new(body), Arc::new(source_map))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn body_query(db: &impl DefDatabase2, def: DefWithBodyId) -> Arc<Body> {
|
||||||
|
db.body_with_source_map(def).0
|
||||||
|
}
|
||||||
|
|
||||||
|
fn new(
|
||||||
|
db: &impl DefDatabase2,
|
||||||
|
expander: Expander,
|
||||||
params: Option<ast::ParamList>,
|
params: Option<ast::ParamList>,
|
||||||
body: Option<ast::Expr>,
|
body: Option<ast::Expr>,
|
||||||
) -> (Body, BodySourceMap) {
|
) -> (Body, BodySourceMap) {
|
||||||
lower::lower(db, resolver, file_id, params, body)
|
lower::lower(db, expander, params, body)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn params(&self) -> &[PatId] {
|
pub fn params(&self) -> &[PatId] {
|
||||||
|
@ -126,16 +218,18 @@ impl BodySourceMap {
|
||||||
self.expr_map_back.get(expr).copied()
|
self.expr_map_back.get(expr).copied()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
|
pub fn node_expr(&self, node: Source<&ast::Expr>) -> Option<ExprId> {
|
||||||
self.expr_map.get(&Either::A(AstPtr::new(node))).cloned()
|
let src = node.map(|it| Either::A(AstPtr::new(it)));
|
||||||
|
self.expr_map.get(&src).cloned()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn pat_syntax(&self, pat: PatId) -> Option<PatSource> {
|
pub fn pat_syntax(&self, pat: PatId) -> Option<PatSource> {
|
||||||
self.pat_map_back.get(pat).copied()
|
self.pat_map_back.get(pat).copied()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
|
pub fn node_pat(&self, node: Source<&ast::Pat>) -> Option<PatId> {
|
||||||
self.pat_map.get(&Either::A(AstPtr::new(node))).cloned()
|
let src = node.map(|it| Either::A(AstPtr::new(it)));
|
||||||
|
self.pat_map.get(&src).cloned()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::RecordField> {
|
pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::RecordField> {
|
||||||
|
|
|
@ -2,9 +2,7 @@
|
||||||
|
|
||||||
use hir_expand::{
|
use hir_expand::{
|
||||||
either::Either,
|
either::Either,
|
||||||
hygiene::Hygiene,
|
|
||||||
name::{self, AsName, Name},
|
name::{self, AsName, Name},
|
||||||
AstId, HirFileId, MacroCallLoc, MacroFileKind, Source,
|
|
||||||
};
|
};
|
||||||
use ra_arena::Arena;
|
use ra_arena::Arena;
|
||||||
use ra_syntax::{
|
use ra_syntax::{
|
||||||
|
@ -16,7 +14,7 @@ use ra_syntax::{
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
body::{Body, BodySourceMap, MacroResolver, PatPtr},
|
body::{Body, BodySourceMap, Expander, PatPtr},
|
||||||
builtin_type::{BuiltinFloat, BuiltinInt},
|
builtin_type::{BuiltinFloat, BuiltinInt},
|
||||||
db::DefDatabase2,
|
db::DefDatabase2,
|
||||||
expr::{
|
expr::{
|
||||||
|
@ -30,16 +28,13 @@ use crate::{
|
||||||
|
|
||||||
pub(super) fn lower(
|
pub(super) fn lower(
|
||||||
db: &impl DefDatabase2,
|
db: &impl DefDatabase2,
|
||||||
resolver: MacroResolver,
|
expander: Expander,
|
||||||
file_id: HirFileId,
|
|
||||||
params: Option<ast::ParamList>,
|
params: Option<ast::ParamList>,
|
||||||
body: Option<ast::Expr>,
|
body: Option<ast::Expr>,
|
||||||
) -> (Body, BodySourceMap) {
|
) -> (Body, BodySourceMap) {
|
||||||
ExprCollector {
|
ExprCollector {
|
||||||
resolver,
|
expander,
|
||||||
db,
|
db,
|
||||||
original_file_id: file_id,
|
|
||||||
current_file_id: file_id,
|
|
||||||
source_map: BodySourceMap::default(),
|
source_map: BodySourceMap::default(),
|
||||||
body: Body {
|
body: Body {
|
||||||
exprs: Arena::default(),
|
exprs: Arena::default(),
|
||||||
|
@ -53,9 +48,7 @@ pub(super) fn lower(
|
||||||
|
|
||||||
struct ExprCollector<DB> {
|
struct ExprCollector<DB> {
|
||||||
db: DB,
|
db: DB,
|
||||||
resolver: MacroResolver,
|
expander: Expander,
|
||||||
original_file_id: HirFileId,
|
|
||||||
current_file_id: HirFileId,
|
|
||||||
|
|
||||||
body: Body,
|
body: Body,
|
||||||
source_map: BodySourceMap,
|
source_map: BodySourceMap,
|
||||||
|
@ -101,12 +94,9 @@ where
|
||||||
fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId {
|
fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId {
|
||||||
let ptr = Either::A(ptr);
|
let ptr = Either::A(ptr);
|
||||||
let id = self.body.exprs.alloc(expr);
|
let id = self.body.exprs.alloc(expr);
|
||||||
if self.current_file_id == self.original_file_id {
|
let src = self.expander.to_source(ptr);
|
||||||
self.source_map.expr_map.insert(ptr, id);
|
self.source_map.expr_map.insert(src, id);
|
||||||
}
|
self.source_map.expr_map_back.insert(id, src);
|
||||||
self.source_map
|
|
||||||
.expr_map_back
|
|
||||||
.insert(id, Source { file_id: self.current_file_id, ast: ptr });
|
|
||||||
id
|
id
|
||||||
}
|
}
|
||||||
// desugared exprs don't have ptr, that's wrong and should be fixed
|
// desugared exprs don't have ptr, that's wrong and should be fixed
|
||||||
|
@ -117,20 +107,16 @@ where
|
||||||
fn alloc_expr_field_shorthand(&mut self, expr: Expr, ptr: AstPtr<ast::RecordField>) -> ExprId {
|
fn alloc_expr_field_shorthand(&mut self, expr: Expr, ptr: AstPtr<ast::RecordField>) -> ExprId {
|
||||||
let ptr = Either::B(ptr);
|
let ptr = Either::B(ptr);
|
||||||
let id = self.body.exprs.alloc(expr);
|
let id = self.body.exprs.alloc(expr);
|
||||||
if self.current_file_id == self.original_file_id {
|
let src = self.expander.to_source(ptr);
|
||||||
self.source_map.expr_map.insert(ptr, id);
|
self.source_map.expr_map.insert(src, id);
|
||||||
}
|
self.source_map.expr_map_back.insert(id, src);
|
||||||
self.source_map
|
|
||||||
.expr_map_back
|
|
||||||
.insert(id, Source { file_id: self.current_file_id, ast: ptr });
|
|
||||||
id
|
id
|
||||||
}
|
}
|
||||||
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
|
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
|
||||||
let id = self.body.pats.alloc(pat);
|
let id = self.body.pats.alloc(pat);
|
||||||
if self.current_file_id == self.original_file_id {
|
let src = self.expander.to_source(ptr);
|
||||||
self.source_map.pat_map.insert(ptr, id);
|
self.source_map.pat_map.insert(src, id);
|
||||||
}
|
self.source_map.pat_map_back.insert(id, src);
|
||||||
self.source_map.pat_map_back.insert(id, Source { file_id: self.current_file_id, ast: ptr });
|
|
||||||
id
|
id
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -272,7 +258,7 @@ where
|
||||||
ast::Expr::PathExpr(e) => {
|
ast::Expr::PathExpr(e) => {
|
||||||
let path = e
|
let path = e
|
||||||
.path()
|
.path()
|
||||||
.and_then(|path| self.parse_path(path))
|
.and_then(|path| self.expander.parse_path(path))
|
||||||
.map(Expr::Path)
|
.map(Expr::Path)
|
||||||
.unwrap_or(Expr::Missing);
|
.unwrap_or(Expr::Missing);
|
||||||
self.alloc_expr(path, syntax_ptr)
|
self.alloc_expr(path, syntax_ptr)
|
||||||
|
@ -288,7 +274,8 @@ where
|
||||||
ast::Expr::ParenExpr(e) => {
|
ast::Expr::ParenExpr(e) => {
|
||||||
let inner = self.collect_expr_opt(e.expr());
|
                 let inner = self.collect_expr_opt(e.expr());
                 // make the paren expr point to the inner expression as well
-                self.source_map.expr_map.insert(Either::A(syntax_ptr), inner);
+                let src = self.expander.to_source(Either::A(syntax_ptr));
+                self.source_map.expr_map.insert(src, inner);
                 inner
             }
             ast::Expr::ReturnExpr(e) => {
@@ -296,7 +283,7 @@ where
                 self.alloc_expr(Expr::Return { expr }, syntax_ptr)
             }
             ast::Expr::RecordLit(e) => {
-                let path = e.path().and_then(|path| self.parse_path(path));
+                let path = e.path().and_then(|path| self.expander.parse_path(path));
                 let mut field_ptrs = Vec::new();
                 let record_lit = if let Some(nfl) = e.record_field_list() {
                     let fields = nfl
@@ -443,32 +430,14 @@ where
             // FIXME implement HIR for these:
             ast::Expr::Label(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
             ast::Expr::RangeExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
-            ast::Expr::MacroCall(e) => {
-                let ast_id = AstId::new(
-                    self.current_file_id,
-                    self.db.ast_id_map(self.current_file_id).ast_id(&e),
-                );
-
-                if let Some(path) = e.path().and_then(|path| self.parse_path(path)) {
-                    if let Some(def) = self.resolver.resolve_path_as_macro(self.db, &path) {
-                        let call_id = self.db.intern_macro(MacroCallLoc { def, ast_id });
-                        let file_id = call_id.as_file(MacroFileKind::Expr);
-                        if let Some(node) = self.db.parse_or_expand(file_id) {
-                            if let Some(expr) = ast::Expr::cast(node) {
-                                log::debug!("macro expansion {:#?}", expr.syntax());
-                                let old_file_id =
-                                    std::mem::replace(&mut self.current_file_id, file_id);
-                                let id = self.collect_expr(expr);
-                                self.current_file_id = old_file_id;
-                                return id;
-                            }
-                        }
-                    }
-                }
-                // FIXME: Instead of just dropping the error from expansion
-                // report it
-                self.alloc_expr(Expr::Missing, syntax_ptr)
-            }
+            ast::Expr::MacroCall(e) => match self.expander.enter_expand(self.db, e) {
+                Some((mark, expansion)) => {
+                    let id = self.collect_expr(expansion);
+                    self.expander.exit(self.db, mark);
+                    id
+                }
+                None => self.alloc_expr(Expr::Missing, syntax_ptr),
+            },
         }
     }

@@ -519,7 +488,7 @@ where
                 Pat::Bind { name, mode: annotation, subpat }
             }
             ast::Pat::TupleStructPat(p) => {
-                let path = p.path().and_then(|path| self.parse_path(path));
+                let path = p.path().and_then(|path| self.expander.parse_path(path));
                 let args = p.args().map(|p| self.collect_pat(p)).collect();
                 Pat::TupleStruct { path, args }
             }
@@ -529,7 +498,7 @@ where
                 Pat::Ref { pat, mutability }
             }
             ast::Pat::PathPat(p) => {
-                let path = p.path().and_then(|path| self.parse_path(path));
+                let path = p.path().and_then(|path| self.expander.parse_path(path));
                 path.map(Pat::Path).unwrap_or(Pat::Missing)
             }
             ast::Pat::TuplePat(p) => {
@@ -538,7 +507,7 @@ where
             }
             ast::Pat::PlaceholderPat(_) => Pat::Wild,
             ast::Pat::RecordPat(p) => {
-                let path = p.path().and_then(|path| self.parse_path(path));
+                let path = p.path().and_then(|path| self.expander.parse_path(path));
                 let record_field_pat_list =
                     p.record_field_pat_list().expect("every struct should have a field list");
                 let mut fields: Vec<_> = record_field_pat_list
@@ -579,11 +548,6 @@ where
             self.missing_pat()
         }
     }
-
-    fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
-        let hygiene = Hygiene::new(self.db, self.current_file_id);
-        Path::from_src(path, &hygiene)
-    }
 }

 impl From<ast::BinOp> for BinaryOp {
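Editorial note, not part of the diff: the deleted `parse_path` method above shows what moved behind the new `Expander`. A minimal sketch of an equivalent `Expander::parse_path`, reconstructed from that removed code; only the call shape `self.expander.parse_path(path)` is taken from the diff, the internal field names (`db`, `current_file_id`) are assumptions:

    impl Expander {
        // Hypothetical internals: build hygiene info for the file currently
        // being lowered, then lower the AST path, as the removed method did.
        fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
            let hygiene = Hygiene::new(self.db, self.current_file_id);
            Path::from_src(path, &hygiene)
        }
    }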
165
crates/ra_hir_def/src/body/scope.rs
Normal file

@@ -0,0 +1,165 @@
+//! FIXME: write short doc here
+use std::sync::Arc;
+
+use hir_expand::name::Name;
+use ra_arena::{impl_arena_id, Arena, RawId};
+use rustc_hash::FxHashMap;
+
+use crate::{
+    body::Body,
+    db::DefDatabase2,
+    expr::{Expr, ExprId, Pat, PatId, Statement},
+    DefWithBodyId,
+};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct ScopeId(RawId);
+impl_arena_id!(ScopeId);
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ExprScopes {
+    scopes: Arena<ScopeId, ScopeData>,
+    scope_by_expr: FxHashMap<ExprId, ScopeId>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ScopeEntry {
+    name: Name,
+    pat: PatId,
+}
+
+impl ScopeEntry {
+    pub fn name(&self) -> &Name {
+        &self.name
+    }
+
+    pub fn pat(&self) -> PatId {
+        self.pat
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ScopeData {
+    parent: Option<ScopeId>,
+    entries: Vec<ScopeEntry>,
+}
+
+impl ExprScopes {
+    pub(crate) fn expr_scopes_query(db: &impl DefDatabase2, def: DefWithBodyId) -> Arc<ExprScopes> {
+        let body = db.body(def);
+        Arc::new(ExprScopes::new(&*body))
+    }
+
+    fn new(body: &Body) -> ExprScopes {
+        let mut scopes =
+            ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() };
+        let root = scopes.root_scope();
+        scopes.add_params_bindings(body, root, body.params());
+        compute_expr_scopes(body.body_expr(), body, &mut scopes, root);
+        scopes
+    }
+
+    pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
+        &self.scopes[scope].entries
+    }
+
+    pub fn scope_chain(&self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + '_ {
+        std::iter::successors(scope, move |&scope| self.scopes[scope].parent)
+    }
+
+    pub fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
+        self.scope_by_expr.get(&expr).copied()
+    }
+
+    pub fn scope_by_expr(&self) -> &FxHashMap<ExprId, ScopeId> {
+        &self.scope_by_expr
+    }
+
+    fn root_scope(&mut self) -> ScopeId {
+        self.scopes.alloc(ScopeData { parent: None, entries: vec![] })
+    }
+
+    fn new_scope(&mut self, parent: ScopeId) -> ScopeId {
+        self.scopes.alloc(ScopeData { parent: Some(parent), entries: vec![] })
+    }
+
+    fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) {
+        match &body[pat] {
+            Pat::Bind { name, .. } => {
+                // bind can have a sub pattern, but it's actually not allowed
+                // to bind to things in there
+                let entry = ScopeEntry { name: name.clone(), pat };
+                self.scopes[scope].entries.push(entry)
+            }
+            p => p.walk_child_pats(|pat| self.add_bindings(body, scope, pat)),
+        }
+    }
+
+    fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) {
+        params.iter().for_each(|pat| self.add_bindings(body, scope, *pat));
+    }
+
+    fn set_scope(&mut self, node: ExprId, scope: ScopeId) {
+        self.scope_by_expr.insert(node, scope);
+    }
+}
+
+fn compute_block_scopes(
+    statements: &[Statement],
+    tail: Option<ExprId>,
+    body: &Body,
+    scopes: &mut ExprScopes,
+    mut scope: ScopeId,
+) {
+    for stmt in statements {
+        match stmt {
+            Statement::Let { pat, initializer, .. } => {
+                if let Some(expr) = initializer {
+                    scopes.set_scope(*expr, scope);
+                    compute_expr_scopes(*expr, body, scopes, scope);
+                }
+                scope = scopes.new_scope(scope);
+                scopes.add_bindings(body, scope, *pat);
+            }
+            Statement::Expr(expr) => {
+                scopes.set_scope(*expr, scope);
+                compute_expr_scopes(*expr, body, scopes, scope);
+            }
+        }
+    }
+    if let Some(expr) = tail {
+        compute_expr_scopes(expr, body, scopes, scope);
+    }
+}
+
+fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: ScopeId) {
+    scopes.set_scope(expr, scope);
+    match &body[expr] {
+        Expr::Block { statements, tail } => {
+            compute_block_scopes(&statements, *tail, body, scopes, scope);
+        }
+        Expr::For { iterable, pat, body: body_expr } => {
+            compute_expr_scopes(*iterable, body, scopes, scope);
+            let scope = scopes.new_scope(scope);
+            scopes.add_bindings(body, scope, *pat);
+            compute_expr_scopes(*body_expr, body, scopes, scope);
+        }
+        Expr::Lambda { args, body: body_expr, .. } => {
+            let scope = scopes.new_scope(scope);
+            scopes.add_params_bindings(body, scope, &args);
+            compute_expr_scopes(*body_expr, body, scopes, scope);
+        }
+        Expr::Match { expr, arms } => {
+            compute_expr_scopes(*expr, body, scopes, scope);
+            for arm in arms {
+                let scope = scopes.new_scope(scope);
+                for pat in &arm.pats {
+                    scopes.add_bindings(body, scope, *pat);
+                }
+                scopes.set_scope(arm.expr, scope);
+                compute_expr_scopes(arm.expr, body, scopes, scope);
+            }
+        }
+        e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)),
+    };
+}
@@ -7,11 +7,12 @@ use ra_syntax::ast;

 use crate::{
     adt::{EnumData, StructData},
+    body::{scope::ExprScopes, Body, BodySourceMap},
     nameres::{
         raw::{ImportSourceMap, RawItems},
         CrateDefMap,
     },
-    EnumId, StructOrUnionId,
+    DefWithBodyId, EnumId, StructOrUnionId,
 };

 #[salsa::query_group(InternDatabaseStorage)]
@@ -52,4 +53,13 @@ pub trait DefDatabase2: InternDatabase + AstDatabase {

     #[salsa::invoke(EnumData::enum_data_query)]
     fn enum_data(&self, e: EnumId) -> Arc<EnumData>;
+
+    #[salsa::invoke(Body::body_with_source_map_query)]
+    fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);
+
+    #[salsa::invoke(Body::body_query)]
+    fn body(&self, def: DefWithBodyId) -> Arc<Body>;
+
+    #[salsa::invoke(ExprScopes::expr_scopes_query)]
+    fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
 }
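Editorial note, not part of the diff: together these queries give downstream code a small API for local name lookup inside a body. `body`/`body_with_source_map` lower the item, and `expr_scopes` computes the scope tree from scope.rs above. A hedged usage sketch; the wrapper function and variable names are illustrative, only the query and `ExprScopes` methods come from this commit:

    // Resolve `name` starting from the scope containing `expr`, walking outwards
    // through parent scopes until a matching binding is found.
    fn resolve_local(
        db: &impl DefDatabase2,
        def: DefWithBodyId,
        expr: ExprId,
        name: &Name,
    ) -> Option<PatId> {
        let scopes = db.expr_scopes(def);
        scopes.scope_chain(scopes.scope_for(expr)).find_map(|scope| {
            scopes
                .entries(scope)
                .iter()
                .find(|entry| entry.name() == name)
                .map(|entry| entry.pat())
        })
    }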
@@ -374,3 +374,13 @@ impl_froms!(
     TypeAliasId,
     BuiltinType
 );
+
+/// The defs which have a body.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum DefWithBodyId {
+    FunctionId(FunctionId),
+    StaticId(StaticId),
+    ConstId(ConstId),
+}
+
+impl_froms!(DefWithBodyId: FunctionId, ConstId, StaticId);
@@ -223,7 +223,7 @@ impl<N: AstNode> AstId<N> {
     }
 }

-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
 pub struct Source<T> {
     pub file_id: HirFileId,
     pub ast: T,
@@ -233,6 +233,9 @@ impl<T> Source<T> {
     pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
         Source { file_id: self.file_id, ast: f(self.ast) }
     }
+    pub fn as_ref(&self) -> Source<&T> {
+        Source { file_id: self.file_id, ast: &self.ast }
+    }
     pub fn file_syntax(&self, db: &impl db::AstDatabase) -> SyntaxNode {
         db.parse_or_expand(self.file_id).expect("source created from invalid file")
     }
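Editorial note, not part of the diff: the `Hash` derive lets `Source<T>` serve as a map key, and `as_ref` lets callers borrow the wrapped node instead of cloning it. A small illustrative sketch, assuming a `src` value obtained elsewhere (for example from `Expander::to_source` above):

    // Borrow the wrapped expression, then map it to an owned summary;
    // `src` itself is not consumed.
    fn describe(src: &Source<ast::Expr>) -> Source<String> {
        src.as_ref().map(|expr| expr.syntax().text().to_string())
    }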
@@ -40,24 +40,24 @@ pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
 // E.g. for after the break in `if break {}`, this should not match
 pub(super) const ATOM_EXPR_FIRST: TokenSet =
     LITERAL_FIRST.union(paths::PATH_FIRST).union(token_set![
-        L_PAREN,
-        L_CURLY,
-        L_BRACK,
-        PIPE,
-        MOVE_KW,
-        BOX_KW,
-        IF_KW,
-        WHILE_KW,
-        MATCH_KW,
-        UNSAFE_KW,
-        RETURN_KW,
-        BREAK_KW,
-        CONTINUE_KW,
+        T!['('],
+        T!['{'],
+        T!['['],
+        T![|],
+        T![move],
+        T![box],
+        T![if],
+        T![while],
+        T![match],
+        T![unsafe],
+        T![return],
+        T![break],
+        T![continue],
+        T![async],
+        T![try],
+        T![loop],
+        T![for],
         LIFETIME,
-        ASYNC_KW,
-        TRY_KW,
-        LOOP_KW,
-        FOR_KW,
     ]);

 const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW];
@@ -30,8 +30,8 @@ const fn mask(kind: SyntaxKind) -> u128 {

 #[macro_export]
 macro_rules! token_set {
-    ($($t:ident),*) => { TokenSet::empty()$(.union(TokenSet::singleton($t)))* };
-    ($($t:ident),* ,) => { token_set!($($t),*) };
+    ($($t:expr),*) => { TokenSet::empty()$(.union(TokenSet::singleton($t)))* };
+    ($($t:expr),* ,) => { token_set!($($t),*) };
 }

 #[test]
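Editorial note, not part of the diff: switching the matcher from `$t:ident` to `$t:expr` is what allows the parser table above to spell tokens as `T!['(']` or `T![move]`; those `T![...]` invocations expand to expressions rather than bare identifiers, so the old `ident` matcher rejected them. With the new rule, plain constants and `T![...]` forms can be mixed freely, e.g. (illustrative only, not a set that exists in the tree):

    const EXAMPLE_FIRST: TokenSet = token_set![LIFETIME, T![if], T![loop], T![return]];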
@@ -9,6 +9,8 @@ publish = false
 once_cell = "1.0.1"
 itertools = "0.8.0"
 backtrace = "0.3.28"
+
+[target.'cfg(not(target_env = "msvc"))'.dependencies]
 jemallocator = { version = "0.3.2", optional = true }
 jemalloc-ctl = { version = "0.3.2", optional = true }

@@ -24,7 +24,7 @@ pub use crate::memory_usage::{Bytes, MemoryUsage};

 // We use jemalloc mainly to get heap usage statistics, actual performance
 // difference is not measures.
-#[cfg(feature = "jemalloc")]
+#[cfg(all(feature = "jemalloc", not(target_env = "msvc")))]
 #[global_allocator]
 static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;

@@ -8,7 +8,7 @@ pub struct MemoryUsage {
 }

 impl MemoryUsage {
-    #[cfg(feature = "jemalloc")]
+    #[cfg(all(feature = "jemalloc", not(target_env = "msvc")))]
     pub fn current() -> MemoryUsage {
         jemalloc_ctl::epoch::advance().unwrap();
         MemoryUsage {
@@ -17,7 +17,7 @@ impl MemoryUsage {
         }
     }

-    #[cfg(not(feature = "jemalloc"))]
+    #[cfg(any(not(feature = "jemalloc"), target_env = "msvc"))]
     pub fn current() -> MemoryUsage {
         MemoryUsage { allocated: Bytes(0), resident: Bytes(0) }
     }

@@ -358,7 +358,7 @@ fn replace_children<N: AstNode>(
 fn test_increase_indent() {
     let arm_list = {
         let arm = make::match_arm(iter::once(make::placeholder_pat().into()), make::expr_unit());
-        make::match_arm_list(vec![arm.clone(), arm].into_iter())
+        make::match_arm_list(vec![arm.clone(), arm])
     };
     assert_eq!(
         arm_list.syntax().to_string(),
@@ -4,6 +4,10 @@ use itertools::Itertools;

 use crate::{ast, AstNode, SourceFile};

+pub fn name(text: &str) -> ast::Name {
+    ast_from_text(&format!("mod {};", text))
+}
+
 pub fn name_ref(text: &str) -> ast::NameRef {
     ast_from_text(&format!("fn f() {{ {}; }}", text))
 }
@@ -43,6 +47,21 @@ pub fn expr_unit() -> ast::Expr {
 pub fn expr_unimplemented() -> ast::Expr {
     expr_from_text("unimplemented!()")
 }
+pub fn expr_path(path: ast::Path) -> ast::Expr {
+    expr_from_text(&path.syntax().to_string())
+}
+pub fn expr_continue() -> ast::Expr {
+    expr_from_text("continue")
+}
+pub fn expr_break() -> ast::Expr {
+    expr_from_text("break")
+}
+pub fn expr_return() -> ast::Expr {
+    expr_from_text("return")
+}
+pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr {
+    expr_from_text(&format!("match {} {}", expr.syntax(), match_arm_list.syntax()))
+}
 fn expr_from_text(text: &str) -> ast::Expr {
     ast_from_text(&format!("const C: () = {};", text))
 }
@@ -65,9 +84,9 @@ pub fn placeholder_pat() -> ast::PlaceholderPat {

 pub fn tuple_struct_pat(
     path: ast::Path,
-    pats: impl Iterator<Item = ast::Pat>,
+    pats: impl IntoIterator<Item = ast::Pat>,
 ) -> ast::TupleStructPat {
-    let pats_str = pats.map(|p| p.syntax().to_string()).join(", ");
+    let pats_str = pats.into_iter().map(|p| p.syntax().to_string()).join(", ");
     return from_text(&format!("{}({})", path.syntax(), pats_str));

     fn from_text(text: &str) -> ast::TupleStructPat {
@@ -75,8 +94,8 @@ pub fn tuple_struct_pat(
     }
 }

-pub fn record_pat(path: ast::Path, pats: impl Iterator<Item = ast::Pat>) -> ast::RecordPat {
-    let pats_str = pats.map(|p| p.syntax().to_string()).join(", ");
+pub fn record_pat(path: ast::Path, pats: impl IntoIterator<Item = ast::Pat>) -> ast::RecordPat {
+    let pats_str = pats.into_iter().map(|p| p.syntax().to_string()).join(", ");
     return from_text(&format!("{} {{ {} }}", path.syntax(), pats_str));

     fn from_text(text: &str) -> ast::RecordPat {
@@ -92,8 +111,8 @@ pub fn path_pat(path: ast::Path) -> ast::PathPat {
     }
 }

-pub fn match_arm(pats: impl Iterator<Item = ast::Pat>, expr: ast::Expr) -> ast::MatchArm {
-    let pats_str = pats.map(|p| p.syntax().to_string()).join(" | ");
+pub fn match_arm(pats: impl IntoIterator<Item = ast::Pat>, expr: ast::Expr) -> ast::MatchArm {
+    let pats_str = pats.into_iter().map(|p| p.syntax().to_string()).join(" | ");
     return from_text(&format!("{} => {}", pats_str, expr.syntax()));

     fn from_text(text: &str) -> ast::MatchArm {
@@ -101,8 +120,8 @@ pub fn match_arm(pats: impl Iterator<Item = ast::Pat>, expr: ast::Expr) -> ast::
     }
 }

-pub fn match_arm_list(arms: impl Iterator<Item = ast::MatchArm>) -> ast::MatchArmList {
-    let arms_str = arms.map(|arm| format!("\n {}", arm.syntax())).join(",");
+pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
+    let arms_str = arms.into_iter().map(|arm| format!("\n {}", arm.syntax())).join(",");
     return from_text(&format!("{},\n", arms_str));

     fn from_text(text: &str) -> ast::MatchArmList {
@@ -110,25 +129,11 @@ pub fn match_arm_list(arms: impl Iterator<Item = ast::MatchArm>) -> ast::MatchAr
     }
 }

-pub fn let_match_early(expr: ast::Expr, path: &str, early_expression: &str) -> ast::LetStmt {
-    return from_text(&format!(
-        r#"let {} = match {} {{
-            {}(it) => it,
-            None => {},
-        }};"#,
-        expr.syntax().text(),
-        expr.syntax().text(),
-        path,
-        early_expression
-    ));
-
-    fn from_text(text: &str) -> ast::LetStmt {
-        ast_from_text(&format!("fn f() {{ {} }}", text))
-    }
-}
-
-pub fn where_pred(path: ast::Path, bounds: impl Iterator<Item = ast::TypeBound>) -> ast::WherePred {
-    let bounds = bounds.map(|b| b.syntax().to_string()).join(" + ");
+pub fn where_pred(
+    path: ast::Path,
+    bounds: impl IntoIterator<Item = ast::TypeBound>,
+) -> ast::WherePred {
+    let bounds = bounds.into_iter().map(|b| b.syntax().to_string()).join(" + ");
     return from_text(&format!("{}: {}", path.syntax(), bounds));

     fn from_text(text: &str) -> ast::WherePred {
@@ -136,8 +141,8 @@ pub fn where_pred(path: ast::Path, bounds: impl Iterator<Item = ast::TypeBound>)
     }
 }

-pub fn where_clause(preds: impl Iterator<Item = ast::WherePred>) -> ast::WhereClause {
-    let preds = preds.map(|p| p.syntax().to_string()).join(", ");
+pub fn where_clause(preds: impl IntoIterator<Item = ast::WherePred>) -> ast::WhereClause {
+    let preds = preds.into_iter().map(|p| p.syntax().to_string()).join(", ");
     return from_text(preds.as_str());

     fn from_text(text: &str) -> ast::WhereClause {
@@ -153,6 +158,14 @@ pub fn if_expression(condition: &ast::Expr, statement: &str) -> ast::IfExpr {
     ))
 }

+pub fn let_stmt(pattern: ast::Pat, initializer: Option<ast::Expr>) -> ast::LetStmt {
+    let text = match initializer {
+        Some(it) => format!("let {} = {};", pattern.syntax(), it.syntax()),
+        None => format!("let {};", pattern.syntax()),
+    };
+    ast_from_text(&format!("fn f() {{ {} }}", text))
+}
+
 fn ast_from_text<N: AstNode>(text: &str) -> N {
     let parse = SourceFile::parse(text);
     let res = parse.tree().syntax().descendants().find_map(N::cast).unwrap();
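Editorial note, not part of the diff: the new constructors plus the `IntoIterator` signatures let callers assemble small expression trees without sprinkling `.into_iter()` at every call site. A hedged usage sketch; `some_pat` and `cond` are assumed to exist already, and only constructors visible in this diff are used:

    use std::iter;

    // Builds: match cond { <some_pat> => break, _ => continue }
    let arms = vec![
        make::match_arm(iter::once(some_pat), make::expr_break()),
        make::match_arm(iter::once(make::placeholder_pat().into()), make::expr_continue()),
    ];
    let match_expr = make::expr_match(cond, make::match_arm_list(arms));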
@@ -12,3 +12,4 @@ quote = "1.0.2"
 proc-macro2 = "1.0.1"
 ron = "0.5.1"
 serde = { version = "1.0.0", features = ["derive"] }
+anyhow = "1.0.19"
@@ -19,10 +19,10 @@ fn update_staged() -> Result<()> {
         .current_dir(&root)
         .output()?;
     if !output.status.success() {
-        Err(format!(
+        anyhow::bail!(
             "`git diff --diff-filter=MAR --name-only --cached` exited with {}",
             output.status
-        ))?;
+        );
     }
     for line in String::from_utf8(output.stdout)?.lines() {
         run(&format!("git update-index --add {}", root.join(line).to_string_lossy()), ".")?;
@@ -52,7 +52,7 @@ fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> {
         _ => (),
     }
     if mode == Mode::Verify {
-        Err(format!("`{}` is not up-to-date", path.display()))?;
+        anyhow::bail!("`{}` is not up-to-date", path.display());
     }
     eprintln!("updating {}", path.display());
     fs::write(path, contents)?;
|
||||||
let is_comment = line.starts_with(prefix);
|
let is_comment = line.starts_with(prefix);
|
||||||
if is_comment {
|
if is_comment {
|
||||||
block.push(line[prefix.len()..].to_string());
|
block.push(line[prefix.len()..].to_string());
|
||||||
} else {
|
} else if !block.is_empty() {
|
||||||
if !block.is_empty() {
|
res.push(mem::replace(&mut block, Vec::new()));
|
||||||
res.push(mem::replace(&mut block, Vec::new()))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !block.is_empty() {
|
if !block.is_empty() {
|
||||||
|
|
|
@@ -102,10 +102,10 @@ fn tests_from_dir(dir: &Path) -> Result<Tests> {
     for test in collect_tests(&text) {
         if test.ok {
             if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
-                return Err(format!("Duplicate test: {}", old_test.name).into());
+                anyhow::bail!("Duplicate test: {}", old_test.name);
             }
         } else if let Some(old_test) = res.err.insert(test.name.clone(), test) {
-            return Err(format!("Duplicate test: {}", old_test.name).into());
+            anyhow::bail!("Duplicate test: {}", old_test.name);
         }
     }
     Ok(())
@@ -2,10 +2,10 @@

 pub mod codegen;

+use anyhow::Context;
+pub use anyhow::Result;
 use std::{
-    env,
-    error::Error,
-    fs,
+    env, fs,
     io::{Error as IoError, ErrorKind},
     path::{Path, PathBuf},
     process::{Command, Output, Stdio},
@@ -13,8 +13,6 @@ use std::{

 use crate::codegen::Mode;

-pub type Result<T> = std::result::Result<T, Box<dyn Error>>;
-
 const TOOLCHAIN: &str = "stable";

 pub fn project_root() -> PathBuf {
@@ -69,7 +67,7 @@ pub fn run_rustfmt(mode: Mode) -> Result<()> {
         .status()
     {
         Ok(status) if status.success() => (),
-        _ => install_rustfmt()?,
+        _ => install_rustfmt().context("install rustfmt")?,
     };

     if mode == Mode::Verify {
@@ -112,7 +110,7 @@ pub fn run_clippy() -> Result<()> {
         .status()
     {
         Ok(status) if status.success() => (),
-        _ => install_clippy()?,
+        _ => install_clippy().context("install clippy")?,
     };

     let allowed_lints = [
@@ -162,9 +160,9 @@ where
     let exec = args.next().unwrap();
     let mut cmd = Command::new(exec);
     f(cmd.args(args).current_dir(proj_dir).stderr(Stdio::inherit()));
-    let output = cmd.output()?;
+    let output = cmd.output().with_context(|| format!("running `{}`", cmdline))?;
     if !output.status.success() {
-        Err(format!("`{}` exited with {}", cmdline, output.status))?;
+        anyhow::bail!("`{}` exited with {}", cmdline, output.status);
     }
     Ok(output)
 }
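Editorial note, not part of the diff: the xtask changes in this commit all follow the same migration: the hand-rolled `Result<T, Box<dyn Error>>` alias is replaced by `anyhow::Result`, string errors become `anyhow::bail!`, and fallible calls gain a human-readable layer via `Context`. A condensed sketch of the pattern; the helper and its arguments are illustrative, not code from the diff:

    use anyhow::{bail, Context, Result};
    use std::{path::Path, process::Command};

    fn run_git(args: &[&str], dir: &Path) -> Result<()> {
        let output = Command::new("git")
            .args(args)
            .current_dir(dir)
            .output()
            .with_context(|| format!("running `git {}`", args.join(" ")))?;
        if !output.status.success() {
            // bail! builds an anyhow::Error from a format string and returns early.
            bail!("`git {}` exited with {}", args.join(" "), output.status);
        }
        Ok(())
    }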
@@ -9,6 +9,7 @@
 //! `.cargo/config`.
 mod help;

+use anyhow::Context;
 use core::fmt::Write;
 use core::str;
 use pico_args::Arguments;
@@ -19,7 +20,7 @@ use xtask::{
 };

 // Latest stable, feel free to send a PR if this lags behind.
-const REQUIRED_RUST_VERSION: u32 = 38;
+const REQUIRED_RUST_VERSION: u32 = 39;

 struct InstallOpt {
     client: Option<ClientOpt>,
@@ -113,21 +114,21 @@ fn handle_extra_flags(e: pico_args::Error) -> Result<()> {
             write!(&mut invalid_flags, "{}, ", flag)?;
         }
         let (invalid_flags, _) = invalid_flags.split_at(invalid_flags.len() - 2);
-        Err(format!("Invalid flags: {}", invalid_flags).into())
+        anyhow::bail!("Invalid flags: {}", invalid_flags)
     } else {
-        Err(e.to_string().into())
+        anyhow::bail!(e.to_string())
     }
 }

 fn install(opts: InstallOpt) -> Result<()> {
     if cfg!(target_os = "macos") {
-        fix_path_for_mac()?
+        fix_path_for_mac().context("Fix path for mac")?
     }
     if let Some(server) = opts.server {
-        install_server(server)?;
+        install_server(server).context("install server")?;
     }
     if let Some(client) = opts.client {
-        install_client(client)?;
+        install_client(client).context("install client")?;
     }
     Ok(())
 }
@@ -139,7 +140,7 @@ fn fix_path_for_mac() -> Result<()> {
     const ROOT_DIR: &str = "";
     let home_dir = match env::var("HOME") {
         Ok(home) => home,
-        Err(e) => Err(format!("Failed getting HOME from environment with error: {}.", e))?,
+        Err(e) => anyhow::bail!("Failed getting HOME from environment with error: {}.", e),
     };

     [ROOT_DIR, &home_dir]
@@ -153,12 +154,12 @@ fn fix_path_for_mac() -> Result<()> {
     if !vscode_path.is_empty() {
         let vars = match env::var_os("PATH") {
             Some(path) => path,
-            None => Err("Could not get PATH variable from env.")?,
+            None => anyhow::bail!("Could not get PATH variable from env."),
         };

         let mut paths = env::split_paths(&vars).collect::<Vec<_>>();
         paths.append(&mut vscode_path);
-        let new_paths = env::join_paths(paths)?;
+        let new_paths = env::join_paths(paths).context("build env PATH")?;
         env::set_var("PATH", &new_paths);
     }

@@ -197,7 +198,7 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {

     let code_binary = match code_binary {
         Some(it) => it,
-        None => Err("Can't execute `code --version`. Perhaps it is not in $PATH?")?,
+        None => anyhow::bail!("Can't execute `code --version`. Perhaps it is not in $PATH?"),
     };

     Cmd {
@@ -218,8 +219,10 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {
     .run_with_output()?;

     if !str::from_utf8(&output.stdout)?.contains("ra-lsp") {
-        Err("Could not install the Visual Studio Code extension. \
-             Please make sure you have at least NodeJS 10.x installed and try again.")?;
+        anyhow::bail!(
+            "Could not install the Visual Studio Code extension. \
+             Please make sure you have at least NodeJS 10.x installed and try again."
+        );
     }

     Ok(())
@@ -239,7 +242,7 @@ fn install_server(opts: ServerOpt) -> Result<()> {
     if old_rust {
         eprintln!(
             "\nWARNING: at least rust 1.{}.0 is required to compile rust-analyzer\n",
-            REQUIRED_RUST_VERSION
+            REQUIRED_RUST_VERSION,
         )
     }

@@ -252,7 +255,7 @@ fn install_server(opts: ServerOpt) -> Result<()> {
     if res.is_err() && old_rust {
         eprintln!(
             "\nWARNING: at least rust 1.{}.0 is required to compile rust-analyzer\n",
-            REQUIRED_RUST_VERSION
+            REQUIRED_RUST_VERSION,
         )
     }
