Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 13:03:31 +00:00

Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes

Commit 18a0937585: 150 changed files with 3562 additions and 3081 deletions.

Cargo.lock (generated): 326 changed lines.
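One notable change in this lockfile is that the chalk-* crates move from the upstream rust-lang/chalk repository to the jackh726 fork, pinned to revision 095cd38a4f16337913bba487f2055b9ca0179f30. The Cargo.toml side of that change is not part of this diff; as a rough sketch under stated assumptions, a git dependency pinned by rev is declared roughly like this (the manifest path and the exact dependency list are illustrative assumptions, only the repository URL and rev are taken from the diff):

# Hypothetical excerpt from a crate manifest such as crates/ra_hir_ty/Cargo.toml;
# only the git URL and rev below come from the Cargo.lock diff in this commit.
[dependencies]
chalk-ir       = { git = "https://github.com/jackh726/chalk.git", rev = "095cd38a4f16337913bba487f2055b9ca0179f30" }
chalk-rust-ir  = { git = "https://github.com/jackh726/chalk.git", rev = "095cd38a4f16337913bba487f2055b9ca0179f30" }
chalk-solve    = { git = "https://github.com/jackh726/chalk.git", rev = "095cd38a4f16337913bba487f2055b9ca0179f30" }

Cargo records the resolved revision in Cargo.lock as the "git+...?rev=..." source strings referenced below.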
Summary of the Cargo.lock changes in this commit:

Crates.io version bumps (each [[package]] entry and every dependency line that
references it is updated accordingly):

    anyhow                 1.0.22  -> 1.0.24
    chrono                 0.4.9   -> 0.4.10
    itertools              0.8.1   -> 0.8.2
    lock_api               0.3.1   -> 0.3.2
    mio                    0.6.19  -> 0.6.21
    num-traits             0.2.9   -> 0.2.10
    parking_lot            0.9.0   -> 0.10.0
    parking_lot_core       0.6.2   -> 0.7.0
    ra_vfs                 0.5.1   -> 0.5.2
    rowan                  0.7.0   -> 0.7.1
    salsa                  0.13.2  -> 0.14.1
    salsa-macros           0.13.2  -> 0.14.1
    serde                  1.0.102 -> 1.0.103
    serde_derive           1.0.102 -> 1.0.103
    serde_json             1.0.41  -> 1.0.42
    smallvec               0.6.13  -> 1.0.0
    unicode-normalization  0.1.9   -> 0.1.11

Source change for the chalk crates: chalk-derive 0.1.0, chalk-engine 0.9.0,
chalk-ir 0.1.0, chalk-macros 0.1.1, chalk-rust-ir 0.1.0 and chalk-solve 0.1.0 now
resolve from
    git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30
instead of
    git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478

Workspace crates:
    The ra_ide_api 0.1.0 package is renamed to ra_ide 0.1.0; ra_batch, ra_cli and
    ra_lsp_server now list "ra_ide 0.1.0" among their dependencies.
    A new workspace crate ra_hir_ty 0.1.0 is added, depending on arrayvec, chalk-ir,
    chalk-rust-ir and chalk-solve (jackh726 fork), ena, insta, lalrpop-intern, log,
    ra_arena, ra_db, ra_hir_def, ra_hir_expand, ra_prof, ra_syntax, rustc-hash and
    test_utils.
    ra_hir now depends on ra_hir_ty 0.1.0 and drops its direct chalk-* and related
    type-inference dependencies, which move to the new crate.
    ra_cli no longer depends on indicatif.

Removed packages (no longer referenced by any dependency):
    derive-new 0.5.8, indicatif 0.13.0, maybe-uninit 2.0.0, number_prefix 0.3.0,
    smallvec 0.6.13 and unicode-width 0.1.6.

Added packages:
    thin-dst 1.0.0, a new dependency of rowan 0.7.1.
@ -1840,7 +1805,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
|
||||
[metadata]
|
||||
"checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
|
||||
"checksum anyhow 1.0.22 (registry+https://github.com/rust-lang/crates.io-index)" = "e19f23ab207147bbdbcdfa7f7e4ca5e84963d79bae3937074682177ab9150968"
|
||||
"checksum anyhow 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)" = "b412394828b7ca486b362f300b762d8e43dafd6f0d727b63f1cd2ade207c6cef"
|
||||
"checksum arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8"
|
||||
"checksum atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90"
|
||||
"checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2"
|
||||
|
@ -1856,13 +1821,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
"checksum cargo_metadata 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8d2d1617e838936c0d2323a65cc151e03ae19a7678dd24f72bccf27119b90a5d"
|
||||
"checksum cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)" = "aa87058dce70a3ff5621797f1506cb837edd02ac4c0ae642b4542dce802908b8"
|
||||
"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
|
||||
"checksum chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
|
||||
"checksum chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
|
||||
"checksum chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
|
||||
"checksum chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
|
||||
"checksum chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
|
||||
"checksum chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
|
||||
"checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68"
|
||||
"checksum chalk-derive 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>"
|
||||
"checksum chalk-engine 0.9.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>"
|
||||
"checksum chalk-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>"
|
||||
"checksum chalk-macros 0.1.1 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>"
|
||||
"checksum chalk-rust-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>"
|
||||
"checksum chalk-solve 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>"
|
||||
"checksum chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "31850b4a4d6bae316f7a09e691c944c28299298837edc0a03f755618c23cbc01"
|
||||
"checksum clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e"
|
||||
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
|
||||
"checksum console 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f5d540c2d34ac9dd0deb5f3b5f54c36c79efa78f6b3ad19106a554d07a7b5d9f"
|
||||
|
@ -1872,7 +1837,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
"checksum crossbeam-epoch 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5064ebdbf05ce3cb95e45c8b086f72263f4166b29b97f6baff7ef7fe047b55ac"
|
||||
"checksum crossbeam-queue 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dfd6515864a82d2f877b42813d4553292c6659498c9a2aa31bab5a15243c2700"
|
||||
"checksum crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce446db02cdc3165b94ae73111e570793400d0794e46125cc4056c81cbb039f4"
|
||||
"checksum derive-new 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)" = "71f31892cd5c62e414316f2963c5689242c43d8e7bbcaaeca97e5e28c95d91d9"
|
||||
"checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
|
||||
"checksum drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "69b26e475fd29098530e709294e94e661974c851aed42512793f120fed4e199f"
|
||||
"checksum dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ea57b42383d091c85abcc2706240b94ab2a8fa1fc81c10ff23c4de06e2a90b5e"
|
||||
|
@ -1898,12 +1862,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
"checksum hermit-abi 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "307c3c9f937f38e3534b1d6447ecf090cafcc9744e4a6360e8b037b2cf5af120"
|
||||
"checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9"
|
||||
"checksum indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712d7b3ea5827fcb9d4fda14bf4da5f136f0db2ae9c8f4bd4e2d1c6fde4e6db2"
|
||||
"checksum indicatif 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8572bccfb0665e70b7faf44ee28841b8e0823450cd4ad562a76b5a3c4bf48487"
|
||||
"checksum inotify 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "40b54539f3910d6f84fbf9a643efd6e3aa6e4f001426c0329576128255994718"
|
||||
"checksum inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e74a1aa87c59aeff6ef2cc2fa62d41bc43f54952f55652656b18a02fd5e356c0"
|
||||
"checksum insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0d499dc062e841590a67230d853bce62d0abeb91304927871670b7c55c461349"
|
||||
"checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e"
|
||||
"checksum itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "87fa75c9dea7b07be3138c49abbb83fd4bea199b5cdc76f9804458edc5da0d6e"
|
||||
"checksum itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f56a2d0bc861f9165be4eb3442afd3c236d8a98afd426f65d92324ae1091a484"
|
||||
"checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f"
|
||||
"checksum jemalloc-ctl 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c502a5ff9dd2924f1ed32ba96e3b65735d837b4bfd978d3161b1702e66aca4b7"
|
||||
"checksum jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0d3b9f3f5c9b31aa0f5ed3260385ac205db665baa41d49bb8338008ae94ede45"
|
||||
|
@ -1916,27 +1879,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
"checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"
|
||||
"checksum libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)" = "1a31a0627fdf1f6a39ec0dd577e101440b7db22672c0901fe00a9a6fbb5c24e8"
|
||||
"checksum linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83"
|
||||
"checksum lock_api 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f8912e782533a93a167888781b836336a6ca5da6175c05944c86cf28c31104dc"
|
||||
"checksum lock_api 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e57b3997725d2b60dbec1297f6c2e2957cc383db1cebd6be812163f969c7d586"
|
||||
"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
|
||||
"checksum lsp-server 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0ba36405bd742139ab79c246ca5adb7fde2fe1a0f495e2c8e2f607b607dedb12"
|
||||
"checksum lsp-types 0.61.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fa3268fbe8beb2795c2fb327bf44f4f3d24f5fe9ebc18d7e2980afd444d72bcf"
|
||||
"checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
|
||||
"checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
|
||||
"checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
|
||||
"checksum memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "75189eb85871ea5c2e2c15abbdd541185f63b408415e5051f5cac122d8c774b9"
|
||||
"checksum mio 0.6.19 (registry+https://github.com/rust-lang/crates.io-index)" = "83f51996a3ed004ef184e16818edc51fadffe8e7ca68be67f9dee67d84d0ff23"
|
||||
"checksum mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "302dec22bcf6bae6dfb69c647187f4b4d0fb6f535521f7bc022430ce8e12008f"
|
||||
"checksum mio-extras 2.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "46e73a04c2fa6250b8d802134d56d554a9ec2922bf977777c805ea5def61ce40"
|
||||
"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
|
||||
"checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88"
|
||||
"checksum notify 4.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "199628fc33b21bc767baa057490b00b382ecbae030803a7b36292422d15b778b"
|
||||
"checksum num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09"
|
||||
"checksum num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "443c53b3c3531dfcbfa499d8893944db78474ad7a1d87fa2d94d1a2231693ac6"
|
||||
"checksum num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c81ffc11c212fa327657cb19dd85eb7419e163b5b076bede2bdb5c974c07e4"
|
||||
"checksum num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "76dac5ed2a876980778b8b85f75a71b6cbf0db0b1232ee12f826bccb00d09d72"
|
||||
"checksum number_prefix 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b02fc0ff9a9e4b35b3342880f48e896ebf69f2967921fe8646bf5b7125956a"
|
||||
"checksum once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "891f486f630e5c5a4916c7e16c4b24a53e78c860b646e9f8e005e4f16847bfed"
|
||||
"checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
|
||||
"checksum parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252"
|
||||
"checksum parking_lot_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b876b1b9e7ac6e1a74a6da34d25c42e17e8862aa409cbbbdcfc8d86c6f3bc62b"
|
||||
"checksum parking_lot 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "92e98c49ab0b7ce5b222f2cc9193fc4efe11c6d0bd4f648e374684a6857b1cfc"
|
||||
"checksum parking_lot_core 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7582838484df45743c8434fbff785e8edf260c28748353d44bc0da32e0ceabf1"
|
||||
"checksum paste 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "423a519e1c6e828f1e73b720f9d9ed2fa643dce8a7737fb43235ce0b41eeaa49"
|
||||
"checksum paste-impl 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4214c9e912ef61bf42b81ba9a47e8aad1b2ffaf739ab162bf96d1e011f54e6c5"
|
||||
"checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
|
||||
|
@ -1949,7 +1910,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
"checksum psm 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b14fc68b454f875abc8354c2555e1d56596f74833ddc0f77f87f4871ed6a30e0"
|
||||
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
|
||||
"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
|
||||
"checksum ra_vfs 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a95d3d1edc70a98d9e42f145bc92c2071dfdc532571591c9eac407c0e4feb89"
|
||||
"checksum ra_vfs 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bc898f237e4b4498959ae0100c688793a23e77624d44ef710ba70094217f98e0"
|
||||
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
|
||||
"checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412"
|
||||
"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
@@ -1974,25 +1935,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bedde000f40f2921ce439ea165c9c53fd629bfa115140c72e22aceacb4a21954"
"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
"checksum ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ece421e0c4129b90e4a35b6f625e472e96c552136f5093a2f4fa2bbb75a62d5"
"checksum rowan 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d3a241900475bf2ba302061550ff50c82b45095ca95d23d1872345793fd42407"
"checksum rowan 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ca620bbf9c48c92b5cef19f96354a309ac36b7d8ef7c591e66117335c8b1988b"
"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
"checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
"checksum rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c86aae0c77166108c01305ee1a36a1e77289d7dc6ca0a3cd91ff4992de2d16a5"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
"checksum ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bfa8506c1de11c9c4e4c38863ccbe02a305c8188e85a05a784c9e11e1c3910c8"
"checksum salsa 0.13.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ec0865bdd9d8e614686a0cbb76979c735810131d287eb1683e91e4e64a58c387"
"checksum salsa-macros 0.13.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cac182212d3a1db75ddc42399ff1461b258a694b8318ee7e0baf6c976e39efee"
"checksum salsa 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4a006c56096acaaa5e82e5974c28d05ff1e84aa70615f19c53fecf8a1afb2fd2"
"checksum salsa-macros 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "038a09b6271446f1123f142fe7e5bef6d4687c4cf82e6986be574c2af3745530"
"checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421"
"checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
"checksum serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4b39bd9b0b087684013a792c59e3e07a46a01d2322518d8a1104641a0b1be0"
"checksum serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "ca13fc1a832f793322228923fbb3aba9f3f44444898f835d31ad1b74fa0a2bf8"
"checksum serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "2f72eb2a68a7dc3f9a691bfda9305a1c017a6215e5a4545c258500d2099a37c2"
"checksum serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "1217f97ab8e8904b57dd22eb61cde455fa7446a9c1cf43966066da047c1f3702"
"checksum serde_derive 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "a8c6faef9a2e64b0064f48570289b4bf8823b7581f1d6157c1b52152306651d0"
"checksum serde_json 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)" = "1a3351dcbc1f067e2c92ab7c3c1f288ad1a4cffc470b5aaddb4c2e0a3ae80043"
"checksum serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "cd02c7587ec314570041b2754829f84d873ced14a96d1fd1823531e11db40573"
"checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35"
"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
"checksum smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6"
"checksum smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ecf3b85f68e8abaa7555aa5abdb1153079387e60b718283d732f03897fcfc86"
"checksum smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34836c9a295c62c2ce3514471117c5cb269891e8421b2aafdd910050576c4d8b"
"checksum stacker 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "d96fc4f13a0ac088e9a3cd9af1cc8c5cc1ab5deb2145cef661267dfc9c542f8a"
@@ -2001,14 +1961,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
"checksum termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625"
"checksum text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579"
"checksum thin-dst 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c52fd98a9e4913c466d83381a59245691875d2f3e04611fca57f964bd8aa96e1"
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
"checksum threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865"
"checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f"
"checksum unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
"checksum unicode-normalization 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "09c8070a9942f5e7cfccd93f490fdebd230ee3c3c9f107cb25bad5351ef671cf"
"checksum unicode-normalization 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b561e267b2326bb4cebfc0ef9e68355c7abe6c6f522aeac2f5bf95d56c59bdcf"
"checksum unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e83e153d1053cbb5a118eeff7fd5be06ed99153f00dbcd8ae310c5fb2b22edc0"
"checksum unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20"
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
"checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61"
"checksum uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fde2f6a4bea1d6e007c4ad38c6839fa71cbb63b6dbf5b595aa38dc9b1093c11"
@@ -56,7 +56,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frls-2.2E0

## Quick Links

* API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide_api/
* API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide/

## License
@@ -1,4 +1,4 @@
use hir::{db::HirDatabase, HirDisplay, Ty};
use hir::{db::HirDatabase, HirDisplay};
use ra_syntax::{
    ast::{self, AstNode, LetStmt, NameOwner},
    T,
@@ -43,7 +43,7 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi
    let analyzer = ctx.source_analyzer(stmt.syntax(), None);
    let ty = analyzer.type_of(db, &expr)?;
    // Assist not applicable if the type is unknown
    if is_unknown(&ty) {
    if ty.contains_unknown() {
        return None;
    }

@@ -53,15 +53,6 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi
    })
}

/// Returns true if any type parameter is unknown
fn is_unknown(ty: &Ty) -> bool {
    match ty {
        Ty::Unknown => true,
        Ty::Apply(a_ty) => a_ty.parameters.iter().any(is_unknown),
        _ => false,
    }
}

#[cfg(test)]
mod tests {
    use super::*;
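Aside (not part of the commit): the hunks above drop the assist-local `is_unknown` helper in favour of `Type::contains_unknown()` from the new hir API. As a self-contained sketch of the same recursive check, written over a toy `Ty` enum invented purely for this example:

```rust
// Toy model of the check: a type "contains unknown" if it is `Unknown`
// itself or if any of its type parameters does, recursively.
enum Ty {
    Unknown,
    Apply { name: &'static str, parameters: Vec<Ty> },
}

impl Ty {
    fn contains_unknown(&self) -> bool {
        match self {
            Ty::Unknown => true,
            Ty::Apply { parameters, .. } => parameters.iter().any(Ty::contains_unknown),
        }
    }
}

fn main() {
    // `Option<{unknown}>` should make the assist bail out...
    let partially_inferred =
        Ty::Apply { name: "Option", parameters: vec![Ty::Unknown] };
    // ...while a fully inferred `Vec<i32>` is fine.
    let fully_inferred = Ty::Apply {
        name: "Vec",
        parameters: vec![Ty::Apply { name: "i32", parameters: vec![] }],
    };
    assert!(partially_inferred.contains_unknown());
    assert!(!fully_inferred.contains_unknown());
    if let Ty::Apply { name, .. } = &fully_inferred {
        println!("{} has no unknown type parameters", name);
    }
}
```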
@@ -35,8 +35,8 @@ pub(crate) fn add_new(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
    let strukt = ctx.find_node_at_offset::<ast::StructDef>()?;

    // We want to only apply this to non-union structs with named fields
    let field_list = match (strukt.kind(), strukt.is_union()) {
        (StructKind::Record(named), false) => named,
    let field_list = match strukt.kind() {
        StructKind::Record(named) => named,
        _ => return None,
    };
@@ -83,10 +83,11 @@ fn resolve_enum_def(
) -> Option<ast::EnumDef> {
    let expr_ty = analyzer.type_of(db, &expr)?;

    analyzer.autoderef(db, expr_ty).find_map(|ty| match ty.as_adt() {
        Some((Adt::Enum(e), _)) => Some(e.source(db).value),
    let res = expr_ty.autoderef(db).find_map(|ty| match ty.as_adt() {
        Some(Adt::Enum(e)) => Some(e.source(db).value),
        _ => None,
    })
    });
    res
}

fn build_pat(var: ast::EnumVariant) -> Option<ast::Pat> {
@@ -21,6 +21,9 @@ impl salsa::Database for TestDB {
    fn salsa_runtime(&self) -> &salsa::Runtime<Self> {
        &self.runtime
    }
    fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
        &mut self.runtime
    }
}

impl std::panic::RefUnwindSafe for TestDB {}
@@ -15,6 +15,6 @@ crossbeam-channel = "0.4.0"
ra_vfs = "0.5.0"
ra_vfs_glob = { path = "../ra_vfs_glob" }
ra_db = { path = "../ra_db" }
ra_ide_api = { path = "../ra_ide_api" }
ra_ide = { path = "../ra_ide" }
ra_hir = { path = "../ra_hir" }
ra_project_model = { path = "../ra_project_model" }
@@ -6,7 +6,7 @@ use rustc_hash::FxHashMap;

use crossbeam_channel::{unbounded, Receiver};
use ra_db::{CrateGraph, FileId, SourceRootId};
use ra_ide_api::{AnalysisChange, AnalysisHost, FeatureFlags};
use ra_ide::{AnalysisChange, AnalysisHost, FeatureFlags};
use ra_project_model::{get_rustc_cfg_options, PackageRoot, ProjectWorkspace};
use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch};
use ra_vfs_glob::RustPackageFilterBuilder;
@@ -8,10 +8,9 @@ publish = false
[dependencies]
pico-args = "0.3.0"
flexi_logger = "0.14.0"
indicatif = "0.13.0"

ra_syntax = { path = "../ra_syntax" }
ra_ide_api = { path = "../ra_ide_api" }
ra_ide = { path = "../ra_ide" }
ra_batch = { path = "../ra_batch" }
ra_hir = { path = "../ra_hir" }
ra_db = { path = "../ra_db" }
@@ -10,7 +10,7 @@ use ra_db::{
    salsa::{Database, Durability},
    FileId, SourceDatabaseExt,
};
use ra_ide_api::{Analysis, AnalysisChange, AnalysisHost, FilePosition, LineCol};
use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FilePosition, LineCol};

use crate::Result;
@@ -91,7 +91,7 @@ fn do_work<F: Fn(&Analysis) -> T, T>(host: &mut AnalysisHost, file_id: FileId, w
    {
        let start = Instant::now();
        eprint!("trivial change: ");
        host.raw_database().salsa_runtime().synthetic_write(Durability::LOW);
        host.raw_database_mut().salsa_runtime_mut().synthetic_write(Durability::LOW);
        work(&host.analysis());
        eprintln!("{:?}", start.elapsed());
    }
@@ -111,7 +111,7 @@ fn do_work<F: Fn(&Analysis) -> T, T>(host: &mut AnalysisHost, file_id: FileId, w
    {
        let start = Instant::now();
        eprint!("const change: ");
        host.raw_database().salsa_runtime().synthetic_write(Durability::HIGH);
        host.raw_database_mut().salsa_runtime_mut().synthetic_write(Durability::HIGH);
        let res = work(&host.analysis());
        eprintln!("{:?}", start.elapsed());
        res
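Aside (not part of the commit): both hunks switch to `salsa_runtime_mut()` because `synthetic_write` now needs mutable access to the runtime under salsa 0.14. The surrounding measurement is just "invalidate, then time the next query"; a minimal, self-contained sketch of that timing harness (the `time_it` name is ours, not ra_cli's):

```rust
use std::time::Instant;

/// Times one unit of work and reports elapsed wall-clock time,
/// mirroring the `Instant::now()` / `start.elapsed()` pattern above.
fn time_it<T>(label: &str, work: impl FnOnce() -> T) -> T {
    let start = Instant::now();
    let res = work();
    eprintln!("{}: {:?}", label, start.elapsed());
    res
}

fn main() {
    // In analysis_bench the closure would re-run `work(&host.analysis())`
    // after `synthetic_write(Durability::LOW)` has invalidated the inputs.
    let sum = time_it("trivial change", || (0..1_000_000u64).sum::<u64>());
    println!("{}", sum);
}
```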
@@ -6,7 +6,7 @@ use ra_db::SourceDatabaseExt;
use ra_hir::{AssocItem, Crate, HasSource, HirDisplay, ModuleDef, Ty, TypeWalk};
use ra_syntax::AstNode;

use crate::{Result, Verbosity};
use crate::{progress_report::ProgressReport, Result, Verbosity};

pub fn run(
    verbosity: Verbosity,
@@ -75,17 +75,11 @@ pub fn run(
    println!("Item Collection: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage());

    let inference_time = Instant::now();
    let bar = match verbosity {
        Verbosity::Verbose | Verbosity::Normal => indicatif::ProgressBar::with_draw_target(
            funcs.len() as u64,
            indicatif::ProgressDrawTarget::stderr_nohz(),
        ),
        Verbosity::Quiet => indicatif::ProgressBar::hidden(),
    let mut bar = match verbosity {
        Verbosity::Verbose | Verbosity::Normal => ProgressReport::new(funcs.len() as u64),
        Verbosity::Quiet => ProgressReport::hidden(),
    };

    bar.set_style(
        indicatif::ProgressStyle::default_bar().template("{wide_bar} {pos}/{len}\n{msg}"),
    );
    bar.tick();
    let mut num_exprs = 0;
    let mut num_exprs_unknown = 0;
@@ -3,12 +3,13 @@
mod analysis_stats;
mod analysis_bench;
mod help;
mod progress_report;

use std::{error::Error, fmt::Write, io::Read};

use flexi_logger::Logger;
use pico_args::Arguments;
use ra_ide_api::{file_structure, Analysis};
use ra_ide::{file_structure, Analysis};
use ra_prof::profile;
use ra_syntax::{AstNode, SourceFile};
crates/ra_cli/src/progress_report.rs (new file, 120 lines)
@@ -0,0 +1,120 @@
//! A simple progress bar
//!
//! A single thread non-optimized progress bar
use std::io::Write;

/// A Simple ASCII Progress Bar
pub struct ProgressReport {
    curr: f32,
    text: String,
    hidden: bool,

    len: u64,
    pos: u64,
    msg: String,
}

impl ProgressReport {
    pub fn new(len: u64) -> ProgressReport {
        ProgressReport {
            curr: 0.0,
            text: String::new(),
            hidden: false,
            len,
            pos: 0,
            msg: String::new(),
        }
    }

    pub fn hidden() -> ProgressReport {
        ProgressReport {
            curr: 0.0,
            text: String::new(),
            hidden: true,
            len: 0,
            pos: 0,
            msg: String::new(),
        }
    }

    pub fn set_message(&mut self, msg: &str) {
        self.msg = msg.to_string();
        self.tick();
    }

    pub fn println<I: Into<String>>(&mut self, msg: I) {
        self.clear();
        println!("{}", msg.into());
        self.tick();
    }

    pub fn inc(&mut self, delta: u64) {
        self.pos += delta;
        if self.len == 0 {
            self.set_value(0.0)
        } else {
            self.set_value((self.pos as f32) / (self.len as f32))
        }
        self.tick();
    }

    pub fn finish_and_clear(&mut self) {
        self.clear();
    }

    pub fn tick(&mut self) {
        if self.hidden {
            return;
        }
        let percent = (self.curr * 100.0) as u32;
        let text = format!("{}/{} {:3>}% {}", self.pos, self.len, percent, self.msg);
        self.update_text(&text);
    }

    fn update_text(&mut self, text: &str) {
        // Get length of common portion
        let mut common_prefix_length = 0;
        let common_length = usize::min(self.text.len(), text.len());

        while common_prefix_length < common_length
            && text.chars().nth(common_prefix_length).unwrap()
                == self.text.chars().nth(common_prefix_length).unwrap()
        {
            common_prefix_length += 1;
        }

        // Backtrack to the first differing character
        let mut output = String::new();
        output += &'\x08'.to_string().repeat(self.text.len() - common_prefix_length);
        // Output new suffix
        output += &text[common_prefix_length..text.len()];

        // If the new text is shorter than the old one: delete overlapping characters
        if let Some(overlap_count) = self.text.len().checked_sub(text.len()) {
            if overlap_count > 0 {
                output += &" ".repeat(overlap_count);
                output += &"\x08".repeat(overlap_count);
            }
        }

        let _ = std::io::stdout().write(output.as_bytes());
        let _ = std::io::stdout().flush();
        self.text = text.to_string();
    }

    fn set_value(&mut self, value: f32) {
        self.curr = f32::max(0.0, f32::min(1.0, value));
    }

    fn clear(&mut self) {
        if self.hidden {
            return;
        }

        // Fill all last text to space and return the cursor
        let spaces = " ".repeat(self.text.len());
        let backspaces = "\x08".repeat(self.text.len());
        print!("{}{}{}", backspaces, spaces, backspaces);
        self.text = String::new();
    }
}
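Aside (not part of the commit): a rough sketch of how the `analysis_stats` hunk above drives this type, build one `ProgressReport` sized to the number of items, then `set_message`/`inc` per item and clear it at the end. It assumes the module above is in scope (e.g. `mod progress_report; use progress_report::ProgressReport;`); the item list and the sleep are stand-ins for real work, not ra_cli code.

```rust
use std::{thread, time::Duration};

fn main() {
    let items = ["ra_db", "ra_hir", "ra_ide"]; // stand-in work items
    let mut bar = ProgressReport::new(items.len() as u64);
    for item in items.iter() {
        bar.set_message(*item); // redraws "pos/len percent msg" in place
        thread::sleep(Duration::from_millis(200)); // stand-in for inference work
        bar.inc(1);
    }
    bar.finish_and_clear();
    println!("done");
}
```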
@@ -8,7 +8,7 @@ authors = ["rust-analyzer developers"]
doctest = false

[dependencies]
salsa = "0.13.0"
salsa = "0.14.1"
relative-path = "1.0.0"
rustc-hash = "1.0"
@@ -1,4 +1,4 @@
//! ra_db defines basic database traits. The concrete DB is defined by ra_ide_api.
//! ra_db defines basic database traits. The concrete DB is defined by ra_ide.
mod cancellation;
mod input;
pub mod fixture;
@ -8,28 +8,11 @@ authors = ["rust-analyzer developers"]
|
|||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
arrayvec = "0.5.1"
|
||||
log = "0.4.5"
|
||||
rustc-hash = "1.0"
|
||||
parking_lot = "0.9.0"
|
||||
ena = "0.13"
|
||||
once_cell = "1.0.1"
|
||||
|
||||
ra_syntax = { path = "../ra_syntax" }
|
||||
ra_arena = { path = "../ra_arena" }
|
||||
ra_cfg = { path = "../ra_cfg" }
|
||||
ra_db = { path = "../ra_db" }
|
||||
mbe = { path = "../ra_mbe", package = "ra_mbe" }
|
||||
tt = { path = "../ra_tt", package = "ra_tt" }
|
||||
hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
|
||||
hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
|
||||
test_utils = { path = "../test_utils" }
|
||||
ra_prof = { path = "../ra_prof" }
|
||||
|
||||
chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "a88cad7f0a69e05ba8f40b74c58a1c229c1b2478" }
|
||||
chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "a88cad7f0a69e05ba8f40b74c58a1c229c1b2478" }
|
||||
chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "a88cad7f0a69e05ba8f40b74c58a1c229c1b2478" }
|
||||
lalrpop-intern = "0.15.1"
|
||||
|
||||
[dev-dependencies]
|
||||
insta = "0.12.0"
|
||||
hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" }
|
||||
|
|
|
@ -6,28 +6,31 @@ use std::sync::Arc;
|
|||
|
||||
use hir_def::{
|
||||
adt::VariantData,
|
||||
body::{Body, BodySourceMap},
|
||||
builtin_type::BuiltinType,
|
||||
docs::Documentation,
|
||||
expr::{BindingAnnotation, Pat, PatId},
|
||||
per_ns::PerNs,
|
||||
resolver::{HasResolver, TypeNs},
|
||||
type_ref::TypeRef,
|
||||
AstItemDef, ConstId, ContainerId, EnumId, FunctionId, HasModule, ImplId, LocalEnumVariantId,
|
||||
LocalImportId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId,
|
||||
TraitId, TypeAliasId, UnionId,
|
||||
resolver::HasResolver,
|
||||
type_ref::{Mutability, TypeRef},
|
||||
AdtId, AstItemDef, ConstId, ContainerId, DefWithBodyId, EnumId, FunctionId, GenericDefId,
|
||||
HasModule, ImplId, LocalEnumVariantId, LocalImportId, LocalModuleId, LocalStructFieldId,
|
||||
Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId,
|
||||
};
|
||||
use hir_expand::{
|
||||
diagnostics::DiagnosticSink,
|
||||
name::{self, AsName},
|
||||
AstId, MacroDefId,
|
||||
};
|
||||
use hir_ty::expr::ExprValidator;
|
||||
use ra_db::{CrateId, Edition, FileId, FilePosition};
|
||||
use ra_syntax::{ast, AstNode, SyntaxNode};
|
||||
|
||||
use crate::{
|
||||
db::{DefDatabase, HirDatabase},
|
||||
expr::{BindingAnnotation, Body, BodySourceMap, ExprValidator, Pat, PatId},
|
||||
ty::{InferenceResult, Namespace, TraitRef},
|
||||
Either, Name, Source, Ty,
|
||||
ty::display::HirFormatter,
|
||||
ty::{self, InEnvironment, InferenceResult, TraitEnvironment, Ty, TyDefId, TypeCtor, TypeWalk},
|
||||
CallableDef, Either, HirDisplay, Name, Source,
|
||||
};
|
||||
|
||||
/// hir::Crate describes a single crate. It's the main interface with which
|
||||
|
@ -168,15 +171,15 @@ pub use hir_def::attr::Attrs;
|
|||
|
||||
impl Module {
|
||||
pub(crate) fn new(krate: Crate, crate_module_id: LocalModuleId) -> Module {
|
||||
Module { id: ModuleId { krate: krate.crate_id, module_id: crate_module_id } }
|
||||
Module { id: ModuleId { krate: krate.crate_id, local_id: crate_module_id } }
|
||||
}
|
||||
|
||||
/// Name of this module.
|
||||
pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
let parent = def_map[self.id.module_id].parent?;
|
||||
let parent = def_map[self.id.local_id].parent?;
|
||||
def_map[parent].children.iter().find_map(|(name, module_id)| {
|
||||
if *module_id == self.id.module_id {
|
||||
if *module_id == self.id.local_id {
|
||||
Some(name.clone())
|
||||
} else {
|
||||
None
|
||||
|
@ -200,14 +203,14 @@ impl Module {
|
|||
/// Finds a child module with the specified name.
|
||||
pub fn child(self, db: &impl DefDatabase, name: &Name) -> Option<Module> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
let child_id = def_map[self.id.module_id].children.get(name)?;
|
||||
let child_id = def_map[self.id.local_id].children.get(name)?;
|
||||
Some(self.with_module_id(*child_id))
|
||||
}
|
||||
|
||||
/// Iterates over all child modules.
|
||||
pub fn children(self, db: &impl DefDatabase) -> impl Iterator<Item = Module> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
let children = def_map[self.id.module_id]
|
||||
let children = def_map[self.id.local_id]
|
||||
.children
|
||||
.iter()
|
||||
.map(|(_, module_id)| self.with_module_id(*module_id))
|
||||
|
@ -218,7 +221,7 @@ impl Module {
|
|||
/// Finds a parent module.
|
||||
pub fn parent(self, db: &impl DefDatabase) -> Option<Module> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
let parent_id = def_map[self.id.module_id].parent?;
|
||||
let parent_id = def_map[self.id.local_id].parent?;
|
||||
Some(self.with_module_id(parent_id))
|
||||
}
|
||||
|
||||
|
@ -234,7 +237,7 @@ impl Module {
|
|||
|
||||
/// Returns a `ModuleScope`: a set of items, visible in this module.
|
||||
pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef, Option<Import>)> {
|
||||
db.crate_def_map(self.id.krate)[self.id.module_id]
|
||||
db.crate_def_map(self.id.krate)[self.id.local_id]
|
||||
.scope
|
||||
.entries()
|
||||
.map(|(name, res)| {
|
||||
|
@ -244,7 +247,7 @@ impl Module {
|
|||
}
|
||||
|
||||
pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
|
||||
db.crate_def_map(self.id.krate).add_diagnostics(db, self.id.module_id, sink);
|
||||
db.crate_def_map(self.id.krate).add_diagnostics(db, self.id.local_id, sink);
|
||||
for decl in self.declarations(db) {
|
||||
match decl {
|
||||
crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
|
||||
|
@ -269,12 +272,12 @@ impl Module {
|
|||
|
||||
pub fn declarations(self, db: &impl DefDatabase) -> Vec<ModuleDef> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
def_map[self.id.module_id].scope.declarations().map(ModuleDef::from).collect()
|
||||
def_map[self.id.local_id].scope.declarations().map(ModuleDef::from).collect()
|
||||
}
|
||||
|
||||
pub fn impl_blocks(self, db: &impl DefDatabase) -> Vec<ImplBlock> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
def_map[self.id.module_id].impls.iter().copied().map(ImplBlock::from).collect()
|
||||
def_map[self.id.local_id].impls.iter().copied().map(ImplBlock::from).collect()
|
||||
}
|
||||
|
||||
fn with_module_id(self, module_id: LocalModuleId) -> Module {
|
||||
|
@ -320,14 +323,14 @@ pub struct Struct {
|
|||
|
||||
impl Struct {
|
||||
pub fn module(self, db: &impl DefDatabase) -> Module {
|
||||
Module { id: self.id.0.module(db) }
|
||||
Module { id: self.id.module(db) }
|
||||
}
|
||||
|
||||
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
|
||||
Some(self.module(db).krate())
|
||||
}
|
||||
|
||||
pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
|
||||
pub fn name(self, db: &impl DefDatabase) -> Name {
|
||||
db.struct_data(self.id.into()).name.clone()
|
||||
}
|
||||
|
||||
|
@ -349,12 +352,12 @@ impl Struct {
|
|||
.map(|(id, _)| StructField { parent: self.into(), id })
|
||||
}
|
||||
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Ty {
|
||||
db.type_for_def(self.into(), Namespace::Types)
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Type {
|
||||
Type::from_def(db, self.id.module(db).krate, self.id)
|
||||
}
|
||||
|
||||
pub fn constructor_ty(self, db: &impl HirDatabase) -> Ty {
|
||||
db.type_for_def(self.into(), Namespace::Values)
|
||||
db.value_ty(self.id.into())
|
||||
}
|
||||
|
||||
fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
|
||||
|
@ -368,16 +371,38 @@ pub struct Union {
|
|||
}
|
||||
|
||||
impl Union {
|
||||
pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
|
||||
db.struct_data(self.id.into()).name.clone()
|
||||
pub fn name(self, db: &impl DefDatabase) -> Name {
|
||||
db.union_data(self.id).name.clone()
|
||||
}
|
||||
|
||||
pub fn module(self, db: &impl DefDatabase) -> Module {
|
||||
Module { id: self.id.0.module(db) }
|
||||
Module { id: self.id.module(db) }
|
||||
}
|
||||
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Ty {
|
||||
db.type_for_def(self.into(), Namespace::Types)
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Type {
|
||||
Type::from_def(db, self.id.module(db).krate, self.id)
|
||||
}
|
||||
|
||||
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
|
||||
db.union_data(self.id)
|
||||
.variant_data
|
||||
.fields()
|
||||
.iter()
|
||||
.map(|(id, _)| StructField { parent: self.into(), id })
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
|
||||
db.union_data(self.id)
|
||||
.variant_data
|
||||
.fields()
|
||||
.iter()
|
||||
.find(|(_id, data)| data.name == *name)
|
||||
.map(|(id, _)| StructField { parent: self.into(), id })
|
||||
}
|
||||
|
||||
fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
|
||||
db.union_data(self.id).variant_data.clone()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -395,7 +420,7 @@ impl Enum {
|
|||
Some(self.module(db).krate())
|
||||
}
|
||||
|
||||
pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
|
||||
pub fn name(self, db: &impl DefDatabase) -> Name {
|
||||
db.enum_data(self.id).name.clone()
|
||||
}
|
||||
|
||||
|
@ -408,15 +433,12 @@ impl Enum {
|
|||
}
|
||||
|
||||
pub fn variant(self, db: &impl DefDatabase, name: &Name) -> Option<EnumVariant> {
|
||||
db.enum_data(self.id)
|
||||
.variants
|
||||
.iter()
|
||||
.find(|(_id, data)| data.name.as_ref() == Some(name))
|
||||
.map(|(id, _)| EnumVariant { parent: self, id })
|
||||
let id = db.enum_data(self.id).variant(name)?;
|
||||
Some(EnumVariant { parent: self, id })
|
||||
}
|
||||
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Ty {
|
||||
db.type_for_def(self.into(), Namespace::Types)
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Type {
|
||||
Type::from_def(db, self.id.module(db).krate, self.id)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -434,7 +456,7 @@ impl EnumVariant {
|
|||
self.parent
|
||||
}
|
||||
|
||||
pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
|
||||
pub fn name(self, db: &impl DefDatabase) -> Name {
|
||||
db.enum_data(self.parent.id).variants[self.id].name.clone()
|
||||
}
|
||||
|
||||
|
@ -469,12 +491,13 @@ pub enum Adt {
|
|||
impl_froms!(Adt: Struct, Union, Enum);
|
||||
|
||||
impl Adt {
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Ty {
|
||||
match self {
|
||||
Adt::Struct(it) => it.ty(db),
|
||||
Adt::Union(it) => it.ty(db),
|
||||
Adt::Enum(it) => it.ty(db),
|
||||
}
|
||||
pub fn has_non_default_type_params(self, db: &impl HirDatabase) -> bool {
|
||||
let subst = db.generic_defaults(self.into());
|
||||
subst.iter().any(|ty| ty == &Ty::Unknown)
|
||||
}
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Type {
|
||||
let id = AdtId::from(self);
|
||||
Type::from_def(db, id.module(db).krate, id)
|
||||
}
|
||||
|
||||
pub fn module(self, db: &impl DefDatabase) -> Module {
|
||||
|
@ -493,28 +516,24 @@ impl Adt {
|
|||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub enum VariantDef {
|
||||
Struct(Struct),
|
||||
Union(Union),
|
||||
EnumVariant(EnumVariant),
|
||||
}
|
||||
impl_froms!(VariantDef: Struct, EnumVariant);
|
||||
impl_froms!(VariantDef: Struct, Union, EnumVariant);
|
||||
|
||||
impl VariantDef {
|
||||
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
|
||||
match self {
|
||||
VariantDef::Struct(it) => it.fields(db),
|
||||
VariantDef::Union(it) => it.fields(db),
|
||||
VariantDef::EnumVariant(it) => it.fields(db),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
|
||||
match self {
|
||||
VariantDef::Struct(it) => it.field(db, name),
|
||||
VariantDef::EnumVariant(it) => it.field(db, name),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn module(self, db: &impl HirDatabase) -> Module {
|
||||
match self {
|
||||
VariantDef::Struct(it) => it.module(db),
|
||||
VariantDef::Union(it) => it.module(db),
|
||||
VariantDef::EnumVariant(it) => it.module(db),
|
||||
}
|
||||
}
|
||||
|
@ -522,6 +541,7 @@ impl VariantDef {
|
|||
pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
|
||||
match self {
|
||||
VariantDef::Struct(it) => it.variant_data(db),
|
||||
VariantDef::Union(it) => it.variant_data(db),
|
||||
VariantDef::EnumVariant(it) => it.variant_data(db),
|
||||
}
|
||||
}
|
||||
|
@ -538,14 +558,6 @@ pub enum DefWithBody {
|
|||
impl_froms!(DefWithBody: Function, Const, Static);
|
||||
|
||||
impl DefWithBody {
|
||||
pub(crate) fn krate(self, db: &impl HirDatabase) -> Option<Crate> {
|
||||
match self {
|
||||
DefWithBody::Const(c) => c.krate(db),
|
||||
DefWithBody::Function(f) => f.krate(db),
|
||||
DefWithBody::Static(s) => s.krate(db),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn module(self, db: &impl HirDatabase) -> Module {
|
||||
match self {
|
||||
DefWithBody::Const(c) => c.module(db),
|
||||
|
@ -590,11 +602,11 @@ impl Function {
|
|||
}
|
||||
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Ty {
|
||||
db.type_for_def(self.into(), Namespace::Values)
|
||||
db.value_ty(self.id.into())
|
||||
}
|
||||
|
||||
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
|
||||
db.infer(self.into())
|
||||
db.infer(self.id.into())
|
||||
}
|
||||
|
||||
/// The containing impl block, if this is a method.
|
||||
|
@ -623,8 +635,8 @@ impl Function {
|
|||
|
||||
pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
|
||||
let infer = self.infer(db);
|
||||
infer.add_diagnostics(db, self, sink);
|
||||
let mut validator = ExprValidator::new(self, infer, sink);
|
||||
infer.add_diagnostics(db, self.id, sink);
|
||||
let mut validator = ExprValidator::new(self.id, infer, sink);
|
||||
validator.validate_body(db);
|
||||
}
|
||||
}
|
||||
|
@ -648,7 +660,7 @@ impl Const {
|
|||
}
|
||||
|
||||
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
|
||||
db.infer(self.into())
|
||||
db.infer(self.id.into())
|
||||
}
|
||||
|
||||
/// The containing impl block, if this is a type alias.
|
||||
|
@ -691,7 +703,7 @@ impl Static {
|
|||
}
|
||||
|
||||
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
|
||||
db.infer(self.into())
|
||||
db.infer(self.id.into())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -705,73 +717,12 @@ impl Trait {
|
|||
Module { id: self.id.module(db) }
|
||||
}
|
||||
|
||||
pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
|
||||
pub fn name(self, db: &impl DefDatabase) -> Name {
|
||||
db.trait_data(self.id).name.clone()
|
||||
}
|
||||
|
||||
pub fn items(self, db: &impl DefDatabase) -> Vec<AssocItem> {
|
||||
db.trait_data(self.id).items.iter().map(|it| (*it).into()).collect()
|
||||
}
|
||||
|
||||
fn direct_super_traits(self, db: &impl HirDatabase) -> Vec<Trait> {
|
||||
let resolver = self.id.resolver(db);
|
||||
// returning the iterator directly doesn't easily work because of
|
||||
// lifetime problems, but since there usually shouldn't be more than a
|
||||
// few direct traits this should be fine (we could even use some kind of
|
||||
// SmallVec if performance is a concern)
|
||||
db.generic_params(self.id.into())
|
||||
.where_predicates
|
||||
.iter()
|
||||
.filter_map(|pred| match &pred.type_ref {
|
||||
TypeRef::Path(p) if p.as_ident() == Some(&name::SELF_TYPE) => pred.bound.as_path(),
|
||||
_ => None,
|
||||
})
|
||||
.filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path) {
|
||||
Some(TypeNs::TraitId(t)) => Some(t),
|
||||
_ => None,
|
||||
})
|
||||
.map(Trait::from)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Returns an iterator over the whole super trait hierarchy (including the
|
||||
/// trait itself).
|
||||
pub fn all_super_traits(self, db: &impl HirDatabase) -> Vec<Trait> {
|
||||
// we need to take care a bit here to avoid infinite loops in case of cycles
|
||||
// (i.e. if we have `trait A: B; trait B: A;`)
|
||||
let mut result = vec![self];
|
||||
let mut i = 0;
|
||||
while i < result.len() {
|
||||
let t = result[i];
|
||||
// yeah this is quadratic, but trait hierarchies should be flat
|
||||
// enough that this doesn't matter
|
||||
for tt in t.direct_super_traits(db) {
|
||||
if !result.contains(&tt) {
|
||||
result.push(tt);
|
||||
}
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
pub fn associated_type_by_name(self, db: &impl DefDatabase, name: &Name) -> Option<TypeAlias> {
|
||||
let trait_data = db.trait_data(self.id);
|
||||
let res =
|
||||
trait_data.associated_types().map(TypeAlias::from).find(|t| &t.name(db) == name)?;
|
||||
Some(res)
|
||||
}
|
||||
|
||||
pub fn associated_type_by_name_including_super_traits(
|
||||
self,
|
||||
db: &impl HirDatabase,
|
||||
name: &Name,
|
||||
) -> Option<TypeAlias> {
|
||||
self.all_super_traits(db).into_iter().find_map(|t| t.associated_type_by_name(db, name))
|
||||
}
|
||||
|
||||
pub fn trait_ref(self, db: &impl HirDatabase) -> TraitRef {
|
||||
TraitRef::for_trait(db, self)
|
||||
db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
|
||||
}
|
||||
|
||||
pub fn is_auto(self, db: &impl DefDatabase) -> bool {
|
||||
|
@ -785,6 +736,11 @@ pub struct TypeAlias {
|
|||
}
|
||||
|
||||
impl TypeAlias {
|
||||
pub fn has_non_default_type_params(self, db: &impl HirDatabase) -> bool {
|
||||
let subst = db.generic_defaults(self.id.into());
|
||||
subst.iter().any(|ty| ty == &Ty::Unknown)
|
||||
}
|
||||
|
||||
pub fn module(self, db: &impl DefDatabase) -> Module {
|
||||
Module { id: self.id.lookup(db).module(db) }
|
||||
}
|
||||
|
@ -821,8 +777,8 @@ impl TypeAlias {
|
|||
db.type_alias_data(self.id).type_ref.clone()
|
||||
}
|
||||
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Ty {
|
||||
db.type_for_def(self.into(), Namespace::Types)
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Type {
|
||||
Type::from_def(db, self.id.lookup(db).module(db).krate, self.id)
|
||||
}
|
||||
|
||||
pub fn name(self, db: &impl DefDatabase) -> Name {
|
||||
|
@ -897,16 +853,6 @@ impl_froms!(
|
|||
Const
|
||||
);
|
||||
|
||||
impl From<AssocItem> for GenericDef {
|
||||
fn from(item: AssocItem) -> Self {
|
||||
match item {
|
||||
AssocItem::Function(f) => f.into(),
|
||||
AssocItem::Const(c) => c.into(),
|
||||
AssocItem::TypeAlias(t) => t.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct Local {
|
||||
pub(crate) parent: DefWithBody,
|
||||
|
@ -945,9 +891,14 @@ impl Local {
|
|||
self.parent.module(db)
|
||||
}
|
||||
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Ty {
|
||||
let infer = db.infer(self.parent);
|
||||
infer[self.pat_id].clone()
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Type {
|
||||
let def = DefWithBodyId::from(self.parent);
|
||||
let infer = db.infer(def);
|
||||
let ty = infer[self.pat_id].clone();
|
||||
let resolver = def.resolver(db);
|
||||
let krate = def.module(db).krate;
|
||||
let environment = TraitEnvironment::lower(db, &resolver);
|
||||
Type { krate, ty: InEnvironment { value: ty, environment } }
|
||||
}
|
||||
|
||||
pub fn source(self, db: &impl HirDatabase) -> Source<Either<ast::BindPat, ast::SelfParam>> {
|
||||
|
@ -960,7 +911,7 @@ impl Local {
|
|||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct GenericParam {
|
||||
pub(crate) parent: GenericDef,
|
||||
pub(crate) parent: GenericDefId,
|
||||
pub(crate) idx: u32,
|
||||
}
|
||||
|
||||
|
@ -970,6 +921,15 @@ pub struct ImplBlock {
|
|||
}
|
||||
|
||||
impl ImplBlock {
|
||||
pub fn all_in_crate(db: &impl HirDatabase, krate: Crate) -> Vec<ImplBlock> {
|
||||
let impls = db.impls_in_crate(krate.crate_id);
|
||||
impls.all_impls().map(Self::from).collect()
|
||||
}
|
||||
pub fn for_trait(db: &impl HirDatabase, krate: Crate, trait_: Trait) -> Vec<ImplBlock> {
|
||||
let impls = db.impls_in_crate(krate.crate_id);
|
||||
impls.lookup_impl_blocks_for_trait(trait_.id).map(Self::from).collect()
|
||||
}
|
||||
|
||||
pub fn target_trait(&self, db: &impl DefDatabase) -> Option<TypeRef> {
|
||||
db.impl_data(self.id).target_trait.clone()
|
||||
}
|
||||
|
@ -978,13 +938,12 @@ impl ImplBlock {
|
|||
db.impl_data(self.id).target_type.clone()
|
||||
}
|
||||
|
||||
pub fn target_ty(&self, db: &impl HirDatabase) -> Ty {
|
||||
Ty::from_hir(db, &self.id.resolver(db), &self.target_type(db))
|
||||
}
|
||||
|
||||
pub fn target_trait_ref(&self, db: &impl HirDatabase) -> Option<TraitRef> {
|
||||
let target_ty = self.target_ty(db);
|
||||
TraitRef::from_hir(db, &self.id.resolver(db), &self.target_trait(db)?, Some(target_ty))
|
||||
pub fn target_ty(&self, db: &impl HirDatabase) -> Type {
|
||||
let impl_data = db.impl_data(self.id);
|
||||
let resolver = self.id.resolver(db);
|
||||
let environment = TraitEnvironment::lower(db, &resolver);
|
||||
let ty = Ty::from_hir(db, &resolver, &impl_data.target_type);
|
||||
Type { krate: self.id.module(db).krate, ty: InEnvironment { value: ty, environment } }
|
||||
}
|
||||
|
||||
pub fn items(&self, db: &impl DefDatabase) -> Vec<AssocItem> {
|
||||
|
@ -1004,6 +963,194 @@ impl ImplBlock {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct Type {
|
||||
pub(crate) krate: CrateId,
|
||||
pub(crate) ty: InEnvironment<Ty>,
|
||||
}
|
||||
|
||||
impl Type {
|
||||
fn from_def(
|
||||
db: &impl HirDatabase,
|
||||
krate: CrateId,
|
||||
def: impl HasResolver + Into<TyDefId>,
|
||||
) -> Type {
|
||||
let resolver = def.resolver(db);
|
||||
let environment = TraitEnvironment::lower(db, &resolver);
|
||||
let ty = db.ty(def.into());
|
||||
Type { krate, ty: InEnvironment { value: ty, environment } }
|
||||
}
|
||||
|
||||
pub fn is_bool(&self) -> bool {
|
||||
match &self.ty.value {
|
||||
Ty::Apply(a_ty) => match a_ty.ctor {
|
||||
TypeCtor::Bool => true,
|
||||
_ => false,
|
||||
},
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_mutable_reference(&self) -> bool {
|
||||
match &self.ty.value {
|
||||
Ty::Apply(a_ty) => match a_ty.ctor {
|
||||
TypeCtor::Ref(Mutability::Mut) => true,
|
||||
_ => false,
|
||||
},
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_unknown(&self) -> bool {
|
||||
match &self.ty.value {
|
||||
Ty::Unknown => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: this method is broken, as it doesn't take closures into account.
|
||||
pub fn as_callable(&self) -> Option<CallableDef> {
|
||||
Some(self.ty.value.as_callable()?.0)
|
||||
}
|
||||
|
||||
pub fn contains_unknown(&self) -> bool {
|
||||
return go(&self.ty.value);
|
||||
|
||||
fn go(ty: &Ty) -> bool {
|
||||
match ty {
|
||||
Ty::Unknown => true,
|
||||
Ty::Apply(a_ty) => a_ty.parameters.iter().any(go),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fields(&self, db: &impl HirDatabase) -> Vec<(StructField, Type)> {
|
||||
if let Ty::Apply(a_ty) = &self.ty.value {
|
||||
match a_ty.ctor {
|
||||
ty::TypeCtor::Adt(AdtId::StructId(s)) => {
|
||||
let var_def = s.into();
|
||||
return db
|
||||
.field_types(var_def)
|
||||
.iter()
|
||||
.map(|(local_id, ty)| {
|
||||
let def = StructField { parent: var_def.into(), id: local_id };
|
||||
let ty = ty.clone().subst(&a_ty.parameters);
|
||||
(def, self.derived(ty))
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
};
|
||||
Vec::new()
|
||||
}
|
||||
|
||||
pub fn tuple_fields(&self, _db: &impl HirDatabase) -> Vec<Type> {
|
||||
let mut res = Vec::new();
|
||||
if let Ty::Apply(a_ty) = &self.ty.value {
|
||||
match a_ty.ctor {
|
||||
ty::TypeCtor::Tuple { .. } => {
|
||||
for ty in a_ty.parameters.iter() {
|
||||
let ty = ty.clone().subst(&a_ty.parameters);
|
||||
res.push(self.derived(ty));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
};
|
||||
res
|
||||
}
|
||||
|
||||
pub fn variant_fields(
|
||||
&self,
|
||||
db: &impl HirDatabase,
|
||||
def: VariantDef,
|
||||
) -> Vec<(StructField, Type)> {
|
||||
// FIXME: check that ty and def match
|
||||
match &self.ty.value {
|
||||
Ty::Apply(a_ty) => def
|
||||
.fields(db)
|
||||
.into_iter()
|
||||
.map(|it| (it, self.derived(it.ty(db).subst(&a_ty.parameters))))
|
||||
.collect(),
|
||||
_ => Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn autoderef<'a>(&'a self, db: &'a impl HirDatabase) -> impl Iterator<Item = Type> + 'a {
|
||||
// There should be no inference vars in types passed here
|
||||
// FIXME check that?
|
||||
let canonical = crate::ty::Canonical { value: self.ty.value.clone(), num_vars: 0 };
|
||||
let environment = self.ty.environment.clone();
|
||||
let ty = InEnvironment { value: canonical, environment: environment.clone() };
|
||||
ty::autoderef(db, Some(self.krate), ty)
|
||||
.map(|canonical| canonical.value)
|
||||
.map(move |ty| self.derived(ty))
|
||||
}
|
||||
|
||||
// This would be nicer if it just returned an iterator, but that runs into
|
||||
// lifetime problems, because we need to borrow temp `CrateImplBlocks`.
|
||||
pub fn iterate_impl_items<T>(
|
||||
self,
|
||||
db: &impl HirDatabase,
|
||||
krate: Crate,
|
||||
mut callback: impl FnMut(AssocItem) -> Option<T>,
|
||||
) -> Option<T> {
|
||||
for krate in self.ty.value.def_crates(db, krate.crate_id)? {
|
||||
let impls = db.impls_in_crate(krate);
|
||||
|
||||
for impl_block in impls.lookup_impl_blocks(&self.ty.value) {
|
||||
for &item in db.impl_data(impl_block).items.iter() {
|
||||
if let Some(result) = callback(item.into()) {
|
||||
return Some(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
// FIXME: remove
|
||||
pub fn into_ty(self) -> Ty {
|
||||
self.ty.value
|
||||
}
|
||||
|
||||
pub fn as_adt(&self) -> Option<Adt> {
|
||||
let (adt, _subst) = self.ty.value.as_adt()?;
|
||||
Some(adt.into())
|
||||
}
|
||||
|
||||
// FIXME: provide required accessors such that it becomes implementable from outside.
|
||||
pub fn is_equal_for_find_impls(&self, other: &Type) -> bool {
|
||||
match (&self.ty.value, &other.ty.value) {
|
||||
(Ty::Apply(a_original_ty), Ty::Apply(ty::ApplicationTy { ctor, parameters })) => {
|
||||
match ctor {
|
||||
TypeCtor::Ref(..) => match parameters.as_single() {
|
||||
Ty::Apply(a_ty) => a_original_ty.ctor == a_ty.ctor,
|
||||
_ => false,
|
||||
},
|
||||
_ => a_original_ty.ctor == *ctor,
|
||||
}
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn derived(&self, ty: Ty) -> Type {
|
||||
Type {
|
||||
krate: self.krate,
|
||||
ty: InEnvironment { value: ty, environment: self.ty.environment.clone() },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HirDisplay for Type {
|
||||
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> std::fmt::Result {
|
||||
self.ty.value.hir_fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
/// For IDE only
|
||||
pub enum ScopeDef {
|
||||
ModuleDef(ModuleDef),
|
||||
|
|
|
@ -22,7 +22,7 @@ impl Module {
|
|||
/// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
|
||||
pub fn definition_source(self, db: &impl DefDatabase) -> Source<ModuleSource> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
let src = def_map[self.id.module_id].definition_source(db);
|
||||
let src = def_map[self.id.local_id].definition_source(db);
|
||||
src.map(|it| match it {
|
||||
Either::A(it) => ModuleSource::SourceFile(it),
|
||||
Either::B(it) => ModuleSource::Module(it),
|
||||
|
@ -33,7 +33,7 @@ impl Module {
|
|||
/// `None` for the crate root.
|
||||
pub fn declaration_source(self, db: &impl DefDatabase) -> Option<Source<ast::Module>> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
def_map[self.id.module_id].declaration_source(db)
|
||||
def_map[self.id.local_id].declaration_source(db)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -51,13 +51,13 @@ impl HasSource for StructField {
|
|||
impl HasSource for Struct {
|
||||
type Ast = ast::StructDef;
|
||||
fn source(self, db: &impl DefDatabase) -> Source<ast::StructDef> {
|
||||
self.id.0.source(db)
|
||||
self.id.source(db)
|
||||
}
|
||||
}
|
||||
impl HasSource for Union {
|
||||
type Ast = ast::StructDef;
|
||||
fn source(self, db: &impl DefDatabase) -> Source<ast::StructDef> {
|
||||
self.id.0.source(db)
|
||||
type Ast = ast::UnionDef;
|
||||
fn source(self, db: &impl DefDatabase) -> Source<ast::UnionDef> {
|
||||
self.id.source(db)
|
||||
}
|
||||
}
|
||||
impl HasSource for Enum {
|
||||
|
|
|
@ -1,130 +1,22 @@
|
|||
//! FIXME: write short doc here
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use ra_arena::map::ArenaMap;
|
||||
use ra_db::salsa;
|
||||
|
||||
use crate::{
|
||||
ty::{
|
||||
method_resolution::CrateImplBlocks,
|
||||
traits::{AssocTyValue, Impl},
|
||||
CallableDef, FnSig, GenericPredicate, InferenceResult, Namespace, Substs, Ty, TypableDef,
|
||||
TypeCtor,
|
||||
},
|
||||
Crate, DefWithBody, GenericDef, ImplBlock, Trait,
|
||||
};
|
||||
|
||||
pub use hir_def::{
|
||||
db::{
|
||||
BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQuery, CrateLangItemsQuery,
|
||||
DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, ExprScopesQuery,
|
||||
FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase,
|
||||
InternDatabaseStorage, LangItemQuery, ModuleLangItemsQuery, RawItemsQuery,
|
||||
RawItemsWithSourceMapQuery, StaticDataQuery, StructDataQuery, TraitDataQuery,
|
||||
TypeAliasDataQuery,
|
||||
},
|
||||
LocalStructFieldId, VariantId,
|
||||
pub use hir_def::db::{
|
||||
BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQuery, CrateLangItemsQuery,
|
||||
DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, ExprScopesQuery,
|
||||
FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase, InternDatabaseStorage,
|
||||
LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, RawItemsWithSourceMapQuery,
|
||||
StaticDataQuery, StructDataQuery, TraitDataQuery, TypeAliasDataQuery,
|
||||
};
|
||||
pub use hir_expand::db::{
|
||||
AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,
|
||||
ParseMacroQuery,
|
||||
};
|
||||
|
||||
#[salsa::query_group(HirDatabaseStorage)]
|
||||
#[salsa::requires(salsa::Database)]
|
||||
pub trait HirDatabase: DefDatabase {
|
||||
#[salsa::invoke(crate::ty::infer_query)]
|
||||
fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>;
|
||||
|
||||
#[salsa::invoke(crate::ty::type_for_def)]
|
||||
fn type_for_def(&self, def: TypableDef, ns: Namespace) -> Ty;
|
||||
|
||||
#[salsa::invoke(crate::ty::field_types_query)]
|
||||
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Ty>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::callable_item_sig)]
|
||||
fn callable_item_signature(&self, def: CallableDef) -> FnSig;
|
||||
|
||||
#[salsa::invoke(crate::ty::generic_predicates_for_param_query)]
|
||||
fn generic_predicates_for_param(
|
||||
&self,
|
||||
def: GenericDef,
|
||||
param_idx: u32,
|
||||
) -> Arc<[GenericPredicate]>;
|
||||
|
||||
#[salsa::invoke(crate::ty::generic_predicates_query)]
|
||||
fn generic_predicates(&self, def: GenericDef) -> Arc<[GenericPredicate]>;
|
||||
|
||||
#[salsa::invoke(crate::ty::generic_defaults_query)]
|
||||
fn generic_defaults(&self, def: GenericDef) -> Substs;
|
||||
|
||||
#[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)]
|
||||
fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::impls_for_trait_query)]
|
||||
fn impls_for_trait(&self, krate: Crate, trait_: Trait) -> Arc<[ImplBlock]>;
|
||||
|
||||
/// This provides the Chalk trait solver instance. Because Chalk always
|
||||
/// works from a specific crate, this query is keyed on the crate; and
|
||||
/// because Chalk does its own internal caching, the solver is wrapped in a
|
||||
/// Mutex and the query does an untracked read internally, to make sure the
|
||||
/// cached state is thrown away when input facts change.
|
||||
#[salsa::invoke(crate::ty::traits::trait_solver_query)]
|
||||
fn trait_solver(&self, krate: Crate) -> crate::ty::traits::TraitSolver;
|
||||
|
||||
// Interned IDs for Chalk integration
|
||||
#[salsa::interned]
|
||||
fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::ty::TypeCtorId;
|
||||
#[salsa::interned]
|
||||
fn intern_chalk_impl(&self, impl_: Impl) -> crate::ty::traits::GlobalImplId;
|
||||
#[salsa::interned]
|
||||
fn intern_assoc_ty_value(
|
||||
&self,
|
||||
assoc_ty_value: AssocTyValue,
|
||||
) -> crate::ty::traits::AssocTyValueId;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::chalk::associated_ty_data_query)]
|
||||
fn associated_ty_data(
|
||||
&self,
|
||||
id: chalk_ir::TypeId,
|
||||
) -> Arc<chalk_rust_ir::AssociatedTyDatum<chalk_ir::family::ChalkIr>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::chalk::trait_datum_query)]
|
||||
fn trait_datum(
|
||||
&self,
|
||||
krate: Crate,
|
||||
trait_id: chalk_ir::TraitId,
|
||||
) -> Arc<chalk_rust_ir::TraitDatum<chalk_ir::family::ChalkIr>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::chalk::struct_datum_query)]
|
||||
fn struct_datum(
|
||||
&self,
|
||||
krate: Crate,
|
||||
struct_id: chalk_ir::StructId,
|
||||
) -> Arc<chalk_rust_ir::StructDatum<chalk_ir::family::ChalkIr>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::chalk::impl_datum_query)]
|
||||
fn impl_datum(
|
||||
&self,
|
||||
krate: Crate,
|
||||
impl_id: chalk_ir::ImplId,
|
||||
) -> Arc<chalk_rust_ir::ImplDatum<chalk_ir::family::ChalkIr>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::chalk::associated_ty_value_query)]
|
||||
fn associated_ty_value(
|
||||
&self,
|
||||
krate: Crate,
|
||||
id: chalk_rust_ir::AssociatedTyValueId,
|
||||
) -> Arc<chalk_rust_ir::AssociatedTyValue<chalk_ir::family::ChalkIr>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::trait_solve_query)]
|
||||
fn trait_solve(
|
||||
&self,
|
||||
krate: Crate,
|
||||
goal: crate::ty::Canonical<crate::ty::InEnvironment<crate::ty::Obligation>>,
|
||||
) -> Option<crate::ty::traits::Solution>;
|
||||
}
|
||||
pub use hir_ty::db::{
|
||||
AssociatedTyDataQuery, CallableItemSignatureQuery, FieldTypesQuery, GenericDefaultsQuery,
|
||||
GenericPredicatesQuery, HirDatabase, HirDatabaseStorage, ImplDatumQuery, ImplsForTraitQuery,
|
||||
ImplsInCrateQuery, InferQuery, StructDatumQuery, TraitDatumQuery, TraitSolveQuery, TyQuery,
|
||||
ValueTyQuery,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn hir_database_is_object_safe() {
|
||||
|
|
|
@ -1,88 +1,4 @@
|
|||
//! FIXME: write short doc here
|
||||
|
||||
use std::any::Any;
|
||||
|
||||
use hir_expand::HirFileId;
|
||||
use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};
|
||||
|
||||
use crate::{db::AstDatabase, Name, Source};
|
||||
|
||||
pub use hir_def::diagnostics::UnresolvedModule;
|
||||
pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct NoSuchField {
|
||||
pub file: HirFileId,
|
||||
pub field: AstPtr<ast::RecordField>,
|
||||
}
|
||||
|
||||
impl Diagnostic for NoSuchField {
|
||||
fn message(&self) -> String {
|
||||
"no such field".to_string()
|
||||
}
|
||||
|
||||
fn source(&self) -> Source<SyntaxNodePtr> {
|
||||
Source { file_id: self.file, value: self.field.into() }
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct MissingFields {
|
||||
pub file: HirFileId,
|
||||
pub field_list: AstPtr<ast::RecordFieldList>,
|
||||
pub missed_fields: Vec<Name>,
|
||||
}
|
||||
|
||||
impl Diagnostic for MissingFields {
|
||||
fn message(&self) -> String {
|
||||
"fill structure fields".to_string()
|
||||
}
|
||||
fn source(&self) -> Source<SyntaxNodePtr> {
|
||||
Source { file_id: self.file, value: self.field_list.into() }
|
||||
}
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl AstDiagnostic for MissingFields {
|
||||
type AST = ast::RecordFieldList;
|
||||
|
||||
fn ast(&self, db: &impl AstDatabase) -> Self::AST {
|
||||
let root = db.parse_or_expand(self.source().file_id).unwrap();
|
||||
let node = self.source().value.to_node(&root);
|
||||
ast::RecordFieldList::cast(node).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct MissingOkInTailExpr {
|
||||
pub file: HirFileId,
|
||||
pub expr: AstPtr<ast::Expr>,
|
||||
}
|
||||
|
||||
impl Diagnostic for MissingOkInTailExpr {
|
||||
fn message(&self) -> String {
|
||||
"wrap return expression in Ok".to_string()
|
||||
}
|
||||
fn source(&self) -> Source<SyntaxNodePtr> {
|
||||
Source { file_id: self.file, value: self.expr.into() }
|
||||
}
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl AstDiagnostic for MissingOkInTailExpr {
|
||||
type AST = ast::Expr;
|
||||
|
||||
fn ast(&self, db: &impl AstDatabase) -> Self::AST {
|
||||
let root = db.parse_or_expand(self.file).unwrap();
|
||||
let node = self.source().value.to_node(&root);
|
||||
ast::Expr::cast(node).unwrap()
|
||||
}
|
||||
}
|
||||
pub use hir_ty::diagnostics::{MissingFields, MissingOkInTailExpr, NoSuchField};
|
||||
|
|
|
@ -4,14 +4,13 @@
|
|||
//! are splitting the hir.
|
||||
|
||||
use hir_def::{
|
||||
AdtId, AssocItemId, AttrDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
|
||||
GenericDefId, ModuleDefId, StaticId, StructFieldId, StructId, TypeAliasId, UnionId, VariantId,
|
||||
AdtId, AssocItemId, AttrDefId, DefWithBodyId, EnumVariantId, GenericDefId, ModuleDefId,
|
||||
StructFieldId, VariantId,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
ty::{CallableDef, TypableDef},
|
||||
Adt, AssocItem, AttrDef, Const, Crate, DefWithBody, EnumVariant, Function, GenericDef,
|
||||
ModuleDef, Static, StructField, TypeAlias, VariantDef,
|
||||
Adt, AssocItem, AttrDef, Crate, DefWithBody, EnumVariant, GenericDef, ModuleDef, StructField,
|
||||
VariantDef,
|
||||
};
|
||||
|
||||
impl From<ra_db::CrateId> for Crate {
|
||||
|
@ -138,72 +137,6 @@ impl From<GenericDef> for GenericDefId {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<GenericDefId> for GenericDef {
|
    fn from(def: GenericDefId) -> Self {
        match def {
            GenericDefId::FunctionId(it) => GenericDef::Function(it.into()),
            GenericDefId::AdtId(it) => GenericDef::Adt(it.into()),
            GenericDefId::TraitId(it) => GenericDef::Trait(it.into()),
            GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
            GenericDefId::ImplId(it) => GenericDef::ImplBlock(it.into()),
            GenericDefId::EnumVariantId(it) => GenericDef::EnumVariant(it.into()),
            GenericDefId::ConstId(it) => GenericDef::Const(it.into()),
        }
    }
}

impl From<AdtId> for TypableDef {
    fn from(id: AdtId) -> Self {
        Adt::from(id).into()
    }
}

impl From<StructId> for TypableDef {
    fn from(id: StructId) -> Self {
        AdtId::StructId(id).into()
    }
}

impl From<UnionId> for TypableDef {
    fn from(id: UnionId) -> Self {
        AdtId::UnionId(id).into()
    }
}

impl From<EnumId> for TypableDef {
    fn from(id: EnumId) -> Self {
        AdtId::EnumId(id).into()
    }
}

impl From<EnumVariantId> for TypableDef {
    fn from(id: EnumVariantId) -> Self {
        EnumVariant::from(id).into()
    }
}

impl From<TypeAliasId> for TypableDef {
    fn from(id: TypeAliasId) -> Self {
        TypeAlias::from(id).into()
    }
}

impl From<FunctionId> for TypableDef {
    fn from(id: FunctionId) -> Self {
        Function::from(id).into()
    }
}
impl From<ConstId> for TypableDef {
    fn from(id: ConstId) -> Self {
        Const::from(id).into()
    }
}
impl From<StaticId> for TypableDef {
    fn from(id: StaticId) -> Self {
        Static::from(id).into()
    }
}

impl From<Adt> for GenericDefId {
    fn from(id: Adt) -> Self {
        match id {

@@ -214,14 +147,12 @@ impl From<Adt> for GenericDefId {
    }
}

impl From<CallableDef> for GenericDefId {
    fn from(def: CallableDef) -> Self {
impl From<VariantId> for VariantDef {
    fn from(def: VariantId) -> Self {
        match def {
            CallableDef::Function(it) => it.id.into(),
            CallableDef::Struct(it) => it.id.into(),
            CallableDef::EnumVariant(it) => {
                EnumVariantId { parent: it.parent.id, local_id: it.id }.into()
            }
            VariantId::StructId(it) => VariantDef::Struct(it.into()),
            VariantId::EnumVariantId(it) => VariantDef::EnumVariant(it.into()),
            VariantId::UnionId(it) => VariantDef::Union(it.into()),
        }
    }
}

@@ -231,6 +162,7 @@ impl From<VariantDef> for VariantId {
        match def {
            VariantDef::Struct(it) => VariantId::StructId(it.id),
            VariantDef::EnumVariant(it) => VariantId::EnumVariantId(it.into()),
            VariantDef::Union(it) => VariantId::UnionId(it.id),
        }
    }
}

@@ -241,6 +173,12 @@ impl From<StructField> for StructFieldId {
    }
}

impl From<StructFieldId> for StructField {
    fn from(def: StructFieldId) -> Self {
        StructField { parent: def.parent.into(), id: def.local_id }
    }
}

impl From<AttrDef> for AttrDefId {
    fn from(def: AttrDef) -> Self {
        match def {

@@ -257,3 +195,13 @@ impl From<AttrDef> for AttrDefId {
        }
    }
}

impl From<AssocItem> for GenericDefId {
    fn from(item: AssocItem) -> Self {
        match item {
            AssocItem::Function(f) => f.id.into(),
            AssocItem::Const(c) => c.id.into(),
            AssocItem::TypeAlias(t) => t.id.into(),
        }
    }
}

@@ -1,6 +1,6 @@
//! FIXME: write short doc here

use hir_def::{AstItemDef, LocationCtx, ModuleId, StructId, StructOrUnionId, UnionId};
use hir_def::{AstItemDef, LocationCtx, ModuleId};
use hir_expand::{name::AsName, AstId, MacroDefId, MacroDefKind};
use ra_syntax::{
    ast::{self, AstNode, NameOwner},

@@ -19,19 +19,18 @@ pub trait FromSource: Sized {
    fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self>;
}

// FIXIME: these two impls are wrong, `ast::StructDef` might produce either a struct or a union
impl FromSource for Struct {
    type Ast = ast::StructDef;
    fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
        let id: StructOrUnionId = from_source(db, src)?;
        Some(Struct { id: StructId(id) })
        let id = from_source(db, src)?;
        Some(Struct { id })
    }
}
impl FromSource for Union {
    type Ast = ast::StructDef;
    type Ast = ast::UnionDef;
    fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
        let id: StructOrUnionId = from_source(db, src)?;
        Some(Union { id: UnionId(id) })
        let id = from_source(db, src)?;
        Some(Union { id })
    }
}
impl FromSource for Enum {

@@ -263,13 +262,12 @@ impl Module {

        let original_file = src.file_id.original_file(db);

        let (krate, module_id) =
            db.relevant_crates(original_file).iter().find_map(|&crate_id| {
                let crate_def_map = db.crate_def_map(crate_id);
                let local_module_id = crate_def_map.modules_for_file(original_file).next()?;
                Some((crate_id, local_module_id))
            })?;
        Some(Module { id: ModuleId { krate, module_id } })
        let (krate, local_id) = db.relevant_crates(original_file).iter().find_map(|&crate_id| {
            let crate_def_map = db.crate_def_map(crate_id);
            let local_id = crate_def_map.modules_for_file(original_file).next()?;
            Some((crate_id, local_id))
        })?;
        Some(Module { id: ModuleId { krate, local_id } })
    }
}

@@ -32,28 +32,20 @@ pub mod db;
pub mod source_binder;

mod ty;
mod expr;
pub mod diagnostics;
mod util;

mod from_id;
mod code_model;

pub mod from_source;

#[cfg(test)]
mod test_db;
#[cfg(test)]
mod marks;

pub use crate::{
    code_model::{
        src::HasSource, Adt, AssocItem, AttrDef, Const, Container, Crate, CrateDependency,
        DefWithBody, Docs, Enum, EnumVariant, FieldSource, Function, GenericDef, GenericParam,
        HasAttrs, ImplBlock, Import, Local, MacroDef, Module, ModuleDef, ModuleSource, ScopeDef,
        Static, Struct, StructField, Trait, TypeAlias, Union, VariantDef,
        Static, Struct, StructField, Trait, Type, TypeAlias, Union, VariantDef,
    },
    expr::ExprScopes,
    from_source::FromSource,
    source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
    ty::{

@@ -64,6 +56,7 @@ pub use crate::{
};

pub use hir_def::{
    body::scope::ExprScopes,
    builtin_type::BuiltinType,
    docs::Documentation,
    path::{Path, PathKind},

@@ -8,13 +8,17 @@
use std::sync::Arc;

use hir_def::{
    body::{
        scope::{ExprScopes, ScopeId},
        BodySourceMap,
    },
    expr::{ExprId, PatId},
    path::known,
    resolver::{self, resolver_for_scope, HasResolver, Resolver, TypeNs, ValueNs},
    DefWithBodyId,
    AssocItemId, DefWithBodyId,
};
use hir_expand::{
    name::AsName, AstId, HirFileId, MacroCallId, MacroCallLoc, MacroFileKind, Source,
    hygiene::Hygiene, name::AsName, AstId, HirFileId, MacroCallId, MacroFileKind, Source,
};
use ra_syntax::{
    ast::{self, AstNode},

@@ -25,10 +29,12 @@ use ra_syntax::{

use crate::{
    db::HirDatabase,
    expr::{BodySourceMap, ExprScopes, ScopeId},
    ty::method_resolution::{self, implements_trait},
    ty::{
        method_resolution::{self, implements_trait},
        InEnvironment, TraitEnvironment, Ty,
    },
    Adt, AssocItem, Const, DefWithBody, Either, Enum, EnumVariant, FromSource, Function,
    GenericParam, Local, MacroDef, Name, Path, ScopeDef, Static, Struct, Trait, Ty, TypeAlias,
    GenericParam, Local, MacroDef, Name, Path, ScopeDef, Static, Struct, Trait, Type, TypeAlias,
};

fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> {
@ -88,7 +94,7 @@ pub struct SourceAnalyzer {
|
|||
body_owner: Option<DefWithBody>,
|
||||
body_source_map: Option<Arc<BodySourceMap>>,
|
||||
infer: Option<Arc<crate::ty::InferenceResult>>,
|
||||
scopes: Option<Arc<crate::expr::ExprScopes>>,
|
||||
scopes: Option<Arc<ExprScopes>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
|
@ -165,7 +171,7 @@ impl SourceAnalyzer {
|
|||
resolver,
|
||||
body_owner: Some(def),
|
||||
body_source_map: Some(source_map),
|
||||
infer: Some(db.infer(def)),
|
||||
infer: Some(db.infer(def.into())),
|
||||
scopes: Some(scopes),
|
||||
file_id: node.file_id,
|
||||
}
|
||||
|
@ -195,48 +201,52 @@ impl SourceAnalyzer {
|
|||
self.body_source_map.as_ref()?.node_pat(src)
|
||||
}
|
||||
|
||||
pub fn type_of(&self, _db: &impl HirDatabase, expr: &ast::Expr) -> Option<crate::Ty> {
|
||||
pub fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> {
|
||||
let expr_id = self.expr_id(expr)?;
|
||||
Some(self.infer.as_ref()?[expr_id].clone())
|
||||
let ty = self.infer.as_ref()?[expr_id].clone();
|
||||
let environment = TraitEnvironment::lower(db, &self.resolver);
|
||||
Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
|
||||
}
|
||||
|
||||
pub fn type_of_pat(&self, _db: &impl HirDatabase, pat: &ast::Pat) -> Option<crate::Ty> {
|
||||
pub fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> {
|
||||
let pat_id = self.pat_id(pat)?;
|
||||
Some(self.infer.as_ref()?[pat_id].clone())
|
||||
let ty = self.infer.as_ref()?[pat_id].clone();
|
||||
let environment = TraitEnvironment::lower(db, &self.resolver);
|
||||
Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
|
||||
}
|
||||
|
||||
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
|
||||
let expr_id = self.expr_id(&call.clone().into())?;
|
||||
self.infer.as_ref()?.method_resolution(expr_id)
|
||||
self.infer.as_ref()?.method_resolution(expr_id).map(Function::from)
|
||||
}
|
||||
|
||||
pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
|
||||
let expr_id = self.expr_id(&field.clone().into())?;
|
||||
self.infer.as_ref()?.field_resolution(expr_id)
|
||||
self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
|
||||
}
|
||||
|
||||
pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<crate::StructField> {
|
||||
let expr_id = self.expr_id(&field.expr()?)?;
|
||||
self.infer.as_ref()?.record_field_resolution(expr_id)
|
||||
self.infer.as_ref()?.record_field_resolution(expr_id).map(|it| it.into())
|
||||
}
|
||||
|
||||
pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<crate::VariantDef> {
|
||||
let expr_id = self.expr_id(&record_lit.clone().into())?;
|
||||
self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
|
||||
self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into())
|
||||
}
|
||||
|
||||
pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<crate::VariantDef> {
|
||||
let pat_id = self.pat_id(&record_pat.clone().into())?;
|
||||
self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
|
||||
self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into())
|
||||
}
|
||||
|
||||
pub fn resolve_macro_call(
|
||||
&self,
|
||||
db: &impl HirDatabase,
|
||||
macro_call: &ast::MacroCall,
|
||||
macro_call: Source<&ast::MacroCall>,
|
||||
) -> Option<MacroDef> {
|
||||
// This must be a normal source file rather than macro file.
|
||||
let path = macro_call.path().and_then(Path::from_ast)?;
|
||||
let hygiene = Hygiene::new(db, macro_call.file_id);
|
||||
let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &hygiene))?;
|
||||
self.resolver.resolve_path_as_macro(db, &path).map(|it| it.into())
|
||||
}
|
||||
|
||||
|
@ -248,7 +258,7 @@ impl SourceAnalyzer {
|
|||
let types = self.resolver.resolve_path_in_type_ns_fully(db, &path).map(|ty| match ty {
|
||||
TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
|
||||
TypeNs::GenericParam(idx) => PathResolution::GenericParam(GenericParam {
|
||||
parent: self.resolver.generic_def().unwrap().into(),
|
||||
parent: self.resolver.generic_def().unwrap(),
|
||||
idx,
|
||||
}),
|
||||
TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
|
||||
|
@ -290,13 +300,13 @@ impl SourceAnalyzer {
|
|||
if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
|
||||
let expr_id = self.expr_id(&path_expr.into())?;
|
||||
if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
|
||||
return Some(PathResolution::AssocItem(assoc));
|
||||
return Some(PathResolution::AssocItem(assoc.into()));
|
||||
}
|
||||
}
|
||||
if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) {
|
||||
let pat_id = self.pat_id(&path_pat.into())?;
|
||||
if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
|
||||
return Some(PathResolution::AssocItem(assoc));
|
||||
return Some(PathResolution::AssocItem(assoc.into()));
|
||||
}
|
||||
}
|
||||
// This must be a normal source file rather than macro file.
|
||||
|
@ -323,7 +333,7 @@ impl SourceAnalyzer {
|
|||
resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
|
||||
resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
|
||||
resolver::ScopeDef::GenericParam(idx) => {
|
||||
let parent = self.resolver.generic_def().unwrap().into();
|
||||
let parent = self.resolver.generic_def().unwrap();
|
||||
ScopeDef::GenericParam(GenericParam { parent, idx })
|
||||
}
|
||||
resolver::ScopeDef::Local(pat_id) => {
|
||||
|
@ -358,14 +368,14 @@ impl SourceAnalyzer {
|
|||
pub fn iterate_method_candidates<T>(
|
||||
&self,
|
||||
db: &impl HirDatabase,
|
||||
ty: Ty,
|
||||
ty: &Type,
|
||||
name: Option<&Name>,
|
||||
mut callback: impl FnMut(&Ty, Function) -> Option<T>,
|
||||
) -> Option<T> {
|
||||
// There should be no inference vars in types passed here
|
||||
// FIXME check that?
|
||||
// FIXME replace Unknown by bound vars here
|
||||
let canonical = crate::ty::Canonical { value: ty, num_vars: 0 };
|
||||
let canonical = crate::ty::Canonical { value: ty.ty.value.clone(), num_vars: 0 };
|
||||
method_resolution::iterate_method_candidates(
|
||||
&canonical,
|
||||
db,
|
||||
|
@ -373,7 +383,7 @@ impl SourceAnalyzer {
|
|||
name,
|
||||
method_resolution::LookupMode::MethodCall,
|
||||
|ty, it| match it {
|
||||
AssocItem::Function(f) => callback(ty, f),
|
||||
AssocItemId::FunctionId(f) => callback(ty, f.into()),
|
||||
_ => None,
|
||||
},
|
||||
)
|
||||
|
@ -382,34 +392,37 @@ impl SourceAnalyzer {
|
|||
pub fn iterate_path_candidates<T>(
|
||||
&self,
|
||||
db: &impl HirDatabase,
|
||||
ty: Ty,
|
||||
ty: &Type,
|
||||
name: Option<&Name>,
|
||||
callback: impl FnMut(&Ty, AssocItem) -> Option<T>,
|
||||
mut callback: impl FnMut(&Ty, AssocItem) -> Option<T>,
|
||||
) -> Option<T> {
|
||||
// There should be no inference vars in types passed here
|
||||
// FIXME check that?
|
||||
// FIXME replace Unknown by bound vars here
|
||||
let canonical = crate::ty::Canonical { value: ty, num_vars: 0 };
|
||||
let canonical = crate::ty::Canonical { value: ty.ty.value.clone(), num_vars: 0 };
|
||||
method_resolution::iterate_method_candidates(
|
||||
&canonical,
|
||||
db,
|
||||
&self.resolver,
|
||||
name,
|
||||
method_resolution::LookupMode::Path,
|
||||
callback,
|
||||
|ty, it| callback(ty, it.into()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn autoderef<'a>(
|
||||
&'a self,
|
||||
db: &'a impl HirDatabase,
|
||||
ty: Ty,
|
||||
) -> impl Iterator<Item = Ty> + 'a {
|
||||
// There should be no inference vars in types passed here
|
||||
// FIXME check that?
|
||||
let canonical = crate::ty::Canonical { value: ty, num_vars: 0 };
|
||||
crate::ty::autoderef(db, &self.resolver, canonical).map(|canonical| canonical.value)
|
||||
}
|
||||
// pub fn autoderef<'a>(
|
||||
// &'a self,
|
||||
// db: &'a impl HirDatabase,
|
||||
// ty: Ty,
|
||||
// ) -> impl Iterator<Item = Ty> + 'a {
|
||||
// // There should be no inference vars in types passed here
|
||||
// // FIXME check that?
|
||||
// let canonical = crate::ty::Canonical { value: ty, num_vars: 0 };
|
||||
// let krate = self.resolver.krate();
|
||||
// let environment = TraitEnvironment::lower(db, &self.resolver);
|
||||
// let ty = crate::ty::InEnvironment { value: canonical, environment };
|
||||
// crate::ty::autoderef(db, krate, ty).map(|canonical| canonical.value)
|
||||
// }
|
||||
|
||||
/// Checks that particular type `ty` implements `std::future::Future`.
|
||||
/// This function is used in `.await` syntax completion.
|
||||
|
@ -435,32 +448,16 @@ impl SourceAnalyzer {
|
|||
db: &impl HirDatabase,
|
||||
macro_call: Source<&ast::MacroCall>,
|
||||
) -> Option<Expansion> {
|
||||
let def = self.resolve_macro_call(db, macro_call.value)?.id;
|
||||
let def = self.resolve_macro_call(db, macro_call)?.id;
|
||||
let ast_id = AstId::new(
|
||||
macro_call.file_id,
|
||||
db.ast_id_map(macro_call.file_id).ast_id(macro_call.value),
|
||||
);
|
||||
let macro_call_loc = MacroCallLoc { def, ast_id };
|
||||
Some(Expansion {
|
||||
macro_call_id: db.intern_macro(macro_call_loc),
|
||||
macro_call_id: def.as_call_id(db, ast_id),
|
||||
macro_file_kind: to_macro_file_kind(macro_call.value),
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> {
|
||||
self.body_source_map.clone().unwrap()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> {
|
||||
self.infer.clone().unwrap()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn analyzed_declaration(&self) -> Option<DefWithBody> {
|
||||
self.body_owner
|
||||
}
|
||||
}
|
||||
|
||||
fn scope_for(
|
||||
|
|
File diff suppressed because it is too large
@@ -1,12 +0,0 @@
//! Internal utility functions.

use std::sync::Arc;

/// Helper for mutating `Arc<[T]>` (i.e. `Arc::make_mut` for Arc slices).
/// The underlying values are cloned if there are other strong references.
pub(crate) fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] {
    if Arc::get_mut(a).is_none() {
        *a = a.iter().cloned().collect();
    }
    Arc::get_mut(a).unwrap()
}

|
@ -12,25 +12,25 @@ use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
|
|||
|
||||
use crate::{
|
||||
db::DefDatabase, trace::Trace, type_ref::TypeRef, AstItemDef, EnumId, HasChildSource,
|
||||
LocalEnumVariantId, LocalStructFieldId, StructOrUnionId, VariantId,
|
||||
LocalEnumVariantId, LocalStructFieldId, StructId, UnionId, VariantId,
|
||||
};
|
||||
|
||||
/// Note that we use `StructData` for unions as well!
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct StructData {
|
||||
pub name: Option<Name>,
|
||||
pub name: Name,
|
||||
pub variant_data: Arc<VariantData>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct EnumData {
|
||||
pub name: Option<Name>,
|
||||
pub name: Name,
|
||||
pub variants: Arena<LocalEnumVariantId, EnumVariantData>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct EnumVariantData {
|
||||
pub name: Option<Name>,
|
||||
pub name: Name,
|
||||
pub variant_data: Arc<VariantData>,
|
||||
}
|
||||
|
||||
|
@ -49,26 +49,38 @@ pub struct StructFieldData {
|
|||
}
|
||||
|
||||
impl StructData {
|
||||
pub(crate) fn struct_data_query(db: &impl DefDatabase, id: StructOrUnionId) -> Arc<StructData> {
|
||||
pub(crate) fn struct_data_query(db: &impl DefDatabase, id: StructId) -> Arc<StructData> {
|
||||
let src = id.source(db);
|
||||
let name = src.value.name().map(|n| n.as_name());
|
||||
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
|
||||
let variant_data = VariantData::new(src.value.kind());
|
||||
let variant_data = Arc::new(variant_data);
|
||||
Arc::new(StructData { name, variant_data })
|
||||
}
|
||||
pub(crate) fn union_data_query(db: &impl DefDatabase, id: UnionId) -> Arc<StructData> {
|
||||
let src = id.source(db);
|
||||
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
|
||||
let variant_data = VariantData::new(
|
||||
src.value
|
||||
.record_field_def_list()
|
||||
.map(ast::StructKind::Record)
|
||||
.unwrap_or(ast::StructKind::Unit),
|
||||
);
|
||||
let variant_data = Arc::new(variant_data);
|
||||
Arc::new(StructData { name, variant_data })
|
||||
}
|
||||
}
|
||||
|
||||
impl EnumData {
|
||||
pub(crate) fn enum_data_query(db: &impl DefDatabase, e: EnumId) -> Arc<EnumData> {
|
||||
let src = e.source(db);
|
||||
let name = src.value.name().map(|n| n.as_name());
|
||||
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
|
||||
let mut trace = Trace::new_for_arena();
|
||||
lower_enum(&mut trace, &src.value);
|
||||
Arc::new(EnumData { name, variants: trace.into_arena() })
|
||||
}
|
||||
|
||||
pub(crate) fn variant(&self, name: &Name) -> Option<LocalEnumVariantId> {
|
||||
let (id, _) = self.variants.iter().find(|(_id, data)| data.name.as_ref() == Some(name))?;
|
||||
pub fn variant(&self, name: &Name) -> Option<LocalEnumVariantId> {
|
||||
let (id, _) = self.variants.iter().find(|(_id, data)| &data.name == name)?;
|
||||
Some(id)
|
||||
}
|
||||
}
|
||||
|
@ -92,7 +104,7 @@ fn lower_enum(
|
|||
trace.alloc(
|
||||
|| var.clone(),
|
||||
|| EnumVariantData {
|
||||
name: var.name().map(|it| it.as_name()),
|
||||
name: var.name().map_or_else(Name::missing, |it| it.as_name()),
|
||||
variant_data: Arc::new(VariantData::new(var.kind())),
|
||||
},
|
||||
);
|
||||
|
@ -117,6 +129,10 @@ impl VariantData {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn field(&self, name: &Name) -> Option<LocalStructFieldId> {
|
||||
self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None })
|
||||
}
|
||||
|
||||
pub fn is_unit(&self) -> bool {
|
||||
match self {
|
||||
VariantData::Unit => true,
|
||||
|
@ -137,7 +153,12 @@ impl HasChildSource for VariantId {
|
|||
let src = it.parent.child_source(db);
|
||||
src.map(|map| map[it.local_id].kind())
|
||||
}
|
||||
VariantId::StructId(it) => it.0.source(db).map(|it| it.kind()),
|
||||
VariantId::StructId(it) => it.source(db).map(|it| it.kind()),
|
||||
VariantId::UnionId(it) => it.source(db).map(|it| {
|
||||
it.record_field_def_list()
|
||||
.map(ast::StructKind::Record)
|
||||
.unwrap_or(ast::StructKind::Unit)
|
||||
}),
|
||||
};
|
||||
let mut trace = Trace::new_for_map();
|
||||
lower_struct(&mut trace, &src.value);
|
||||
|
|
|
@ -35,7 +35,7 @@ impl Attrs {
|
|||
match def {
|
||||
AttrDefId::ModuleId(module) => {
|
||||
let def_map = db.crate_def_map(module.krate);
|
||||
let src = match def_map[module.module_id].declaration_source(db) {
|
||||
let src = match def_map[module.local_id].declaration_source(db) {
|
||||
Some(it) => it,
|
||||
None => return Attrs::default(),
|
||||
};
|
||||
|
@ -54,9 +54,9 @@ impl Attrs {
|
|||
Attrs::from_attrs_owner(db, src.map(|it| it as &dyn AttrsOwner))
|
||||
}
|
||||
AttrDefId::AdtId(it) => match it {
|
||||
AdtId::StructId(it) => attrs_from_ast(it.0.lookup_intern(db).ast_id, db),
|
||||
AdtId::StructId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db),
|
||||
AdtId::EnumId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db),
|
||||
AdtId::UnionId(it) => attrs_from_ast(it.0.lookup_intern(db).ast_id, db),
|
||||
AdtId::UnionId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db),
|
||||
},
|
||||
AttrDefId::TraitId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db),
|
||||
AttrDefId::MacroDefId(it) => attrs_from_ast(it.ast_id, db),
|
||||
|
|
|
@ -6,8 +6,7 @@ pub mod scope;
|
|||
use std::{ops::Index, sync::Arc};
|
||||
|
||||
use hir_expand::{
|
||||
either::Either, hygiene::Hygiene, AstId, HirFileId, MacroCallLoc, MacroDefId, MacroFileKind,
|
||||
Source,
|
||||
either::Either, hygiene::Hygiene, AstId, HirFileId, MacroDefId, MacroFileKind, Source,
|
||||
};
|
||||
use ra_arena::{map::ArenaMap, Arena};
|
||||
use ra_syntax::{ast, AstNode, AstPtr};
|
||||
|
@ -47,7 +46,7 @@ impl Expander {
|
|||
|
||||
if let Some(path) = macro_call.path().and_then(|path| self.parse_path(path)) {
|
||||
if let Some(def) = self.resolve_path_as_macro(db, &path) {
|
||||
let call_id = db.intern_macro(MacroCallLoc { def, ast_id });
|
||||
let call_id = def.as_call_id(db, ast_id);
|
||||
let file_id = call_id.as_file(MacroFileKind::Expr);
|
||||
if let Some(node) = db.parse_or_expand(file_id) {
|
||||
if let Some(expr) = ast::Expr::cast(node) {
|
||||
|
@ -83,7 +82,7 @@ impl Expander {
|
|||
}
|
||||
|
||||
fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &Path) -> Option<MacroDefId> {
|
||||
self.crate_def_map.resolve_path(db, self.module.module_id, path).0.take_macros()
|
||||
self.crate_def_map.resolve_path(db, self.module.local_id, path).0.take_macros()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -86,39 +86,53 @@ impl TypeAliasData {
|
|||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct TraitData {
|
||||
pub name: Option<Name>,
|
||||
pub items: Vec<AssocItemId>,
|
||||
pub name: Name,
|
||||
pub items: Vec<(Name, AssocItemId)>,
|
||||
pub auto: bool,
|
||||
}
|
||||
|
||||
impl TraitData {
|
||||
pub(crate) fn trait_data_query(db: &impl DefDatabase, tr: TraitId) -> Arc<TraitData> {
|
||||
let src = tr.source(db);
|
||||
let name = src.value.name().map(|n| n.as_name());
|
||||
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
|
||||
let auto = src.value.is_auto();
|
||||
let ast_id_map = db.ast_id_map(src.file_id);
|
||||
|
||||
let container = ContainerId::TraitId(tr);
|
||||
let items = if let Some(item_list) = src.value.item_list() {
|
||||
item_list
|
||||
.impl_items()
|
||||
.map(|item_node| match item_node {
|
||||
ast::ImplItem::FnDef(it) => FunctionLoc {
|
||||
container: ContainerId::TraitId(tr),
|
||||
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
|
||||
ast::ImplItem::FnDef(it) => {
|
||||
let name = it.name().map_or_else(Name::missing, |it| it.as_name());
|
||||
let def = FunctionLoc {
|
||||
container,
|
||||
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
|
||||
}
|
||||
.intern(db)
|
||||
.into();
|
||||
(name, def)
|
||||
}
|
||||
.intern(db)
|
||||
.into(),
|
||||
ast::ImplItem::ConstDef(it) => ConstLoc {
|
||||
container: ContainerId::TraitId(tr),
|
||||
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
|
||||
ast::ImplItem::ConstDef(it) => {
|
||||
let name = it.name().map_or_else(Name::missing, |it| it.as_name());
|
||||
let def = ConstLoc {
|
||||
container,
|
||||
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
|
||||
}
|
||||
.intern(db)
|
||||
.into();
|
||||
(name, def)
|
||||
}
|
||||
.intern(db)
|
||||
.into(),
|
||||
ast::ImplItem::TypeAliasDef(it) => TypeAliasLoc {
|
||||
container: ContainerId::TraitId(tr),
|
||||
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
|
||||
ast::ImplItem::TypeAliasDef(it) => {
|
||||
let name = it.name().map_or_else(Name::missing, |it| it.as_name());
|
||||
let def = TypeAliasLoc {
|
||||
container,
|
||||
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
|
||||
}
|
||||
.intern(db)
|
||||
.into();
|
||||
(name, def)
|
||||
}
|
||||
.intern(db)
|
||||
.into(),
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
|
@ -128,11 +142,18 @@ impl TraitData {
|
|||
}
|
||||
|
||||
pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
|
||||
self.items.iter().filter_map(|item| match item {
|
||||
self.items.iter().filter_map(|(_name, item)| match item {
|
||||
AssocItemId::TypeAliasId(t) => Some(*t),
|
||||
_ => None,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn associated_type_by_name(&self, name: &Name) -> Option<TypeAliasId> {
|
||||
self.items.iter().find_map(|(item_name, item)| match item {
|
||||
AssocItemId::TypeAliasId(t) if item_name == name => Some(*t),
|
||||
_ => None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
|
@ -193,6 +214,7 @@ impl ImplData {
|
|||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct ConstData {
|
||||
/// const _: () = ();
|
||||
pub name: Option<Name>,
|
||||
pub type_ref: TypeRef,
|
||||
}
|
||||
|
|
|
@ -18,8 +18,8 @@ use crate::{
|
|||
CrateDefMap,
|
||||
},
|
||||
AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, FunctionId, FunctionLoc, GenericDefId,
|
||||
ImplId, ItemLoc, ModuleId, StaticId, StaticLoc, StructOrUnionId, TraitId, TypeAliasId,
|
||||
TypeAliasLoc,
|
||||
ImplId, ItemLoc, ModuleId, StaticId, StaticLoc, StructId, TraitId, TypeAliasId, TypeAliasLoc,
|
||||
UnionId,
|
||||
};
|
||||
|
||||
#[salsa::query_group(InternDatabaseStorage)]
|
||||
|
@ -27,7 +27,9 @@ pub trait InternDatabase: SourceDatabase {
|
|||
#[salsa::interned]
|
||||
fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
|
||||
#[salsa::interned]
|
||||
fn intern_struct_or_union(&self, loc: ItemLoc<ast::StructDef>) -> StructOrUnionId;
|
||||
fn intern_struct(&self, loc: ItemLoc<ast::StructDef>) -> StructId;
|
||||
#[salsa::interned]
|
||||
fn intern_union(&self, loc: ItemLoc<ast::UnionDef>) -> UnionId;
|
||||
#[salsa::interned]
|
||||
fn intern_enum(&self, loc: ItemLoc<ast::EnumDef>) -> EnumId;
|
||||
#[salsa::interned]
|
||||
|
@ -57,7 +59,9 @@ pub trait DefDatabase: InternDatabase + AstDatabase {
|
|||
fn crate_def_map(&self, krate: CrateId) -> Arc<CrateDefMap>;
|
||||
|
||||
#[salsa::invoke(StructData::struct_data_query)]
|
||||
fn struct_data(&self, id: StructOrUnionId) -> Arc<StructData>;
|
||||
fn struct_data(&self, id: StructId) -> Arc<StructData>;
|
||||
#[salsa::invoke(StructData::union_data_query)]
|
||||
fn union_data(&self, id: UnionId) -> Arc<StructData>;
|
||||
|
||||
#[salsa::invoke(EnumData::enum_data_query)]
|
||||
fn enum_data(&self, e: EnumId) -> Arc<EnumData>;
|
||||
|
|
|
@ -36,7 +36,7 @@ impl Documentation {
|
|||
match def {
|
||||
AttrDefId::ModuleId(module) => {
|
||||
let def_map = db.crate_def_map(module.krate);
|
||||
let src = def_map[module.module_id].declaration_source(db)?;
|
||||
let src = def_map[module.local_id].declaration_source(db)?;
|
||||
docs_from_ast(&src.value)
|
||||
}
|
||||
AttrDefId::StructFieldId(it) => {
|
||||
|
@ -47,9 +47,9 @@ impl Documentation {
|
|||
}
|
||||
}
|
||||
AttrDefId::AdtId(it) => match it {
|
||||
AdtId::StructId(it) => docs_from_ast(&it.0.source(db).value),
|
||||
AdtId::StructId(it) => docs_from_ast(&it.source(db).value),
|
||||
AdtId::EnumId(it) => docs_from_ast(&it.source(db).value),
|
||||
AdtId::UnionId(it) => docs_from_ast(&it.0.source(db).value),
|
||||
AdtId::UnionId(it) => docs_from_ast(&it.source(db).value),
|
||||
},
|
||||
AttrDefId::EnumVariantId(it) => {
|
||||
let src = it.parent.child_source(db);
|
||||
|
|
|
@@ -60,10 +60,8 @@ impl GenericParams {
        // FIXME: add `: Sized` bound for everything except for `Self` in traits
        match def {
            GenericDefId::FunctionId(it) => generics.fill(&it.lookup(db).source(db).value, start),
            GenericDefId::AdtId(AdtId::StructId(it)) => {
                generics.fill(&it.0.source(db).value, start)
            }
            GenericDefId::AdtId(AdtId::UnionId(it)) => generics.fill(&it.0.source(db).value, start),
            GenericDefId::AdtId(AdtId::StructId(it)) => generics.fill(&it.source(db).value, start),
            GenericDefId::AdtId(AdtId::UnionId(it)) => generics.fill(&it.source(db).value, start),
            GenericDefId::AdtId(AdtId::EnumId(it)) => generics.fill(&it.source(db).value, start),
            GenericDefId::TraitId(it) => {
                // traits get the Self type as an implicit first type parameter
|
|
@ -41,7 +41,7 @@ impl LangItems {
|
|||
crate_def_map
|
||||
.modules
|
||||
.iter()
|
||||
.filter_map(|(module_id, _)| db.module_lang_items(ModuleId { krate, module_id }))
|
||||
.filter_map(|(local_id, _)| db.module_lang_items(ModuleId { krate, local_id }))
|
||||
.for_each(|it| lang_items.items.extend(it.items.iter().map(|(k, v)| (k.clone(), *v))));
|
||||
|
||||
Arc::new(lang_items)
|
||||
|
@ -80,7 +80,7 @@ impl LangItems {
|
|||
fn collect_lang_items(&mut self, db: &impl DefDatabase, module: ModuleId) {
|
||||
// Look for impl targets
|
||||
let def_map = db.crate_def_map(module.krate);
|
||||
let module_data = &def_map[module.module_id];
|
||||
let module_data = &def_map[module.local_id];
|
||||
for &impl_block in module_data.impls.iter() {
|
||||
self.collect_lang_item(db, impl_block, LangItemTarget::ImplBlockId)
|
||||
}
|
||||
|
|
|
@ -27,7 +27,7 @@ pub mod body;
|
|||
pub mod resolver;
|
||||
|
||||
mod trace;
|
||||
mod nameres;
|
||||
pub mod nameres;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test_db;
|
||||
|
@ -50,7 +50,7 @@ impl_arena_id!(LocalImportId);
|
|||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct ModuleId {
|
||||
pub krate: CrateId,
|
||||
pub module_id: LocalModuleId,
|
||||
pub local_id: LocalModuleId,
|
||||
}
|
||||
|
||||
/// An ID of a module, **local** to a specific crate
|
||||
|
@ -141,30 +141,26 @@ impl Lookup for FunctionId {
|
|||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct StructOrUnionId(salsa::InternId);
|
||||
impl_intern_key!(StructOrUnionId);
|
||||
impl AstItemDef<ast::StructDef> for StructOrUnionId {
|
||||
pub struct StructId(salsa::InternId);
|
||||
impl_intern_key!(StructId);
|
||||
impl AstItemDef<ast::StructDef> for StructId {
|
||||
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::StructDef>) -> Self {
|
||||
db.intern_struct_or_union(loc)
|
||||
db.intern_struct(loc)
|
||||
}
|
||||
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::StructDef> {
|
||||
db.lookup_intern_struct_or_union(self)
|
||||
db.lookup_intern_struct(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct StructId(pub StructOrUnionId);
|
||||
impl From<StructId> for StructOrUnionId {
|
||||
fn from(id: StructId) -> StructOrUnionId {
|
||||
id.0
|
||||
pub struct UnionId(salsa::InternId);
|
||||
impl_intern_key!(UnionId);
|
||||
impl AstItemDef<ast::UnionDef> for UnionId {
|
||||
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::UnionDef>) -> Self {
|
||||
db.intern_union(loc)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct UnionId(pub StructOrUnionId);
|
||||
impl From<UnionId> for StructOrUnionId {
|
||||
fn from(id: UnionId) -> StructOrUnionId {
|
||||
id.0
|
||||
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::UnionDef> {
|
||||
db.lookup_intern_union(self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -402,6 +398,16 @@ impl_froms!(
|
|||
ConstId
|
||||
);
|
||||
|
||||
impl From<AssocItemId> for GenericDefId {
|
||||
fn from(item: AssocItemId) -> Self {
|
||||
match item {
|
||||
AssocItemId::FunctionId(f) => f.into(),
|
||||
AssocItemId::ConstId(c) => c.into(),
|
||||
AssocItemId::TypeAliasId(t) => t.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub enum AttrDefId {
|
||||
ModuleId(ModuleId),
|
||||
|
@ -435,6 +441,7 @@ impl_froms!(
|
|||
pub enum VariantId {
|
||||
EnumVariantId(EnumVariantId),
|
||||
StructId(StructId),
|
||||
UnionId(UnionId),
|
||||
}
|
||||
impl_froms!(VariantId: EnumVariantId, StructId);
|
||||
|
||||
|
@ -485,13 +492,23 @@ impl HasModule for ConstLoc {
|
|||
impl HasModule for AdtId {
|
||||
fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
|
||||
match self {
|
||||
AdtId::StructId(it) => it.0.module(db),
|
||||
AdtId::UnionId(it) => it.0.module(db),
|
||||
AdtId::StructId(it) => it.module(db),
|
||||
AdtId::UnionId(it) => it.module(db),
|
||||
AdtId::EnumId(it) => it.module(db),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasModule for DefWithBodyId {
|
||||
fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
|
||||
match self {
|
||||
DefWithBodyId::FunctionId(it) => it.lookup(db).module(db),
|
||||
DefWithBodyId::StaticId(it) => it.lookup(db).module(db),
|
||||
DefWithBodyId::ConstId(it) => it.lookup(db).module(db),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasModule for StaticLoc {
|
||||
fn module(&self, _db: &impl db::DefDatabase) -> ModuleId {
|
||||
self.container
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
use hir_expand::{
|
||||
builtin_macro::find_builtin_macro,
|
||||
name::{self, AsName, Name},
|
||||
HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileKind,
|
||||
HirFileId, MacroCallId, MacroDefId, MacroDefKind, MacroFileKind,
|
||||
};
|
||||
use ra_cfg::CfgOptions;
|
||||
use ra_db::{CrateId, FileId};
|
||||
|
@ -25,7 +25,7 @@ use crate::{
|
|||
per_ns::PerNs,
|
||||
AdtId, AstId, AstItemDef, ConstLoc, ContainerId, EnumId, EnumVariantId, FunctionLoc, ImplId,
|
||||
Intern, LocalImportId, LocalModuleId, LocationCtx, ModuleDefId, ModuleId, StaticLoc, StructId,
|
||||
StructOrUnionId, TraitId, TypeAliasLoc, UnionId,
|
||||
TraitId, TypeAliasLoc, UnionId,
|
||||
};
|
||||
|
||||
pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
|
||||
|
@ -37,7 +37,7 @@ pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> C
|
|||
log::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id);
|
||||
def_map.extern_prelude.insert(
|
||||
dep.as_name(),
|
||||
ModuleId { krate: dep.crate_id, module_id: dep_def_map.root }.into(),
|
||||
ModuleId { krate: dep.crate_id, local_id: dep_def_map.root }.into(),
|
||||
);
|
||||
|
||||
// look for the prelude
|
||||
|
@ -323,7 +323,7 @@ where
|
|||
tested_by!(glob_across_crates);
|
||||
// glob import from other crate => we can just import everything once
|
||||
let item_map = self.db.crate_def_map(m.krate);
|
||||
let scope = &item_map[m.module_id].scope;
|
||||
let scope = &item_map[m.local_id].scope;
|
||||
|
||||
// Module scoped macros is included
|
||||
let items = scope
|
||||
|
@ -337,7 +337,7 @@ where
|
|||
// glob import from same crate => we do an initial
|
||||
// import, and then need to propagate any further
|
||||
// additions
|
||||
let scope = &self.def_map[m.module_id].scope;
|
||||
let scope = &self.def_map[m.local_id].scope;
|
||||
|
||||
// Module scoped macros is included
|
||||
let items = scope
|
||||
|
@ -349,7 +349,7 @@ where
|
|||
self.update(module_id, Some(import_id), &items);
|
||||
// record the glob import in case we add further items
|
||||
self.glob_imports
|
||||
.entry(m.module_id)
|
||||
.entry(m.local_id)
|
||||
.or_default()
|
||||
.push((module_id, import_id));
|
||||
}
|
||||
|
@ -362,7 +362,7 @@ where
|
|||
.variants
|
||||
.iter()
|
||||
.filter_map(|(local_id, variant_data)| {
|
||||
let name = variant_data.name.clone()?;
|
||||
let name = variant_data.name.clone();
|
||||
let variant = EnumVariantId { parent: e, local_id };
|
||||
let res = Resolution {
|
||||
def: PerNs::both(variant.into(), variant.into()),
|
||||
|
@ -480,7 +480,7 @@ where
|
|||
);
|
||||
|
||||
if let Some(def) = resolved_res.resolved_def.take_macros() {
|
||||
let call_id = self.db.intern_macro(MacroCallLoc { def, ast_id: *ast_id });
|
||||
let call_id = def.as_call_id(self.db, *ast_id);
|
||||
resolved.push((*module_id, call_id, def));
|
||||
res = ReachedFixedPoint::No;
|
||||
return false;
|
||||
|
@ -590,7 +590,7 @@ where
|
|||
raw::RawItemKind::Impl(imp) => {
|
||||
let module = ModuleId {
|
||||
krate: self.def_collector.def_map.krate,
|
||||
module_id: self.module_id,
|
||||
local_id: self.module_id,
|
||||
};
|
||||
let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id);
|
||||
let imp_id = ImplId::from_ast_id(ctx, self.raw_items[imp].ast_id);
|
||||
|
@ -673,7 +673,7 @@ where
|
|||
modules[self.module_id].children.insert(name.clone(), res);
|
||||
let resolution = Resolution {
|
||||
def: PerNs::types(
|
||||
ModuleId { krate: self.def_collector.def_map.krate, module_id: res }.into(),
|
||||
ModuleId { krate: self.def_collector.def_map.krate, local_id: res }.into(),
|
||||
),
|
||||
import: None,
|
||||
};
|
||||
|
@ -682,8 +682,7 @@ where
|
|||
}
|
||||
|
||||
fn define_def(&mut self, def: &raw::DefData) {
|
||||
let module =
|
||||
ModuleId { krate: self.def_collector.def_map.krate, module_id: self.module_id };
|
||||
let module = ModuleId { krate: self.def_collector.def_map.krate, local_id: self.module_id };
|
||||
let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id);
|
||||
|
||||
let name = def.name.clone();
|
||||
|
@ -698,14 +697,12 @@ where
|
|||
PerNs::values(def.into())
|
||||
}
|
||||
raw::DefKind::Struct(ast_id) => {
|
||||
let id = StructOrUnionId::from_ast_id(ctx, ast_id).into();
|
||||
let s = StructId(id).into();
|
||||
PerNs::both(s, s)
|
||||
let id = StructId::from_ast_id(ctx, ast_id).into();
|
||||
PerNs::both(id, id)
|
||||
}
|
||||
raw::DefKind::Union(ast_id) => {
|
||||
let id = StructOrUnionId::from_ast_id(ctx, ast_id).into();
|
||||
let u = UnionId(id).into();
|
||||
PerNs::both(u, u)
|
||||
let id = UnionId::from_ast_id(ctx, ast_id).into();
|
||||
PerNs::both(id, id)
|
||||
}
|
||||
raw::DefKind::Enum(ast_id) => PerNs::types(EnumId::from_ast_id(ctx, ast_id).into()),
|
||||
raw::DefKind::Const(ast_id) => {
|
||||
|
@ -775,8 +772,7 @@ where
|
|||
if let Some(macro_def) = mac.path.as_ident().and_then(|name| {
|
||||
self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name)
|
||||
}) {
|
||||
let macro_call_id =
|
||||
self.def_collector.db.intern_macro(MacroCallLoc { def: macro_def, ast_id });
|
||||
let macro_call_id = macro_def.as_call_id(self.def_collector.db, ast_id);
|
||||
|
||||
self.def_collector.collect_macro_expansion(self.module_id, macro_call_id, macro_def);
|
||||
return;
|
||||
|
|
|
@ -74,19 +74,19 @@ impl CrateDefMap {
|
|||
PathKind::DollarCrate(krate) => {
|
||||
if krate == self.krate {
|
||||
tested_by!(macro_dollar_crate_self);
|
||||
PerNs::types(ModuleId { krate: self.krate, module_id: self.root }.into())
|
||||
PerNs::types(ModuleId { krate: self.krate, local_id: self.root }.into())
|
||||
} else {
|
||||
let def_map = db.crate_def_map(krate);
|
||||
let module = ModuleId { krate, module_id: def_map.root };
|
||||
let module = ModuleId { krate, local_id: def_map.root };
|
||||
tested_by!(macro_dollar_crate_other);
|
||||
PerNs::types(module.into())
|
||||
}
|
||||
}
|
||||
PathKind::Crate => {
|
||||
PerNs::types(ModuleId { krate: self.krate, module_id: self.root }.into())
|
||||
PerNs::types(ModuleId { krate: self.krate, local_id: self.root }.into())
|
||||
}
|
||||
PathKind::Self_ => {
|
||||
PerNs::types(ModuleId { krate: self.krate, module_id: original_module }.into())
|
||||
PerNs::types(ModuleId { krate: self.krate, local_id: original_module }.into())
|
||||
}
|
||||
// plain import or absolute path in 2015: crate-relative with
|
||||
// fallback to extern prelude (with the simplification in
|
||||
|
@ -113,7 +113,7 @@ impl CrateDefMap {
|
|||
}
|
||||
PathKind::Super => {
|
||||
if let Some(p) = self.modules[original_module].parent {
|
||||
PerNs::types(ModuleId { krate: self.krate, module_id: p }.into())
|
||||
PerNs::types(ModuleId { krate: self.krate, local_id: p }.into())
|
||||
} else {
|
||||
log::debug!("super path in root module");
|
||||
return ResolvePathResult::empty(ReachedFixedPoint::Yes);
|
||||
|
@ -160,7 +160,7 @@ impl CrateDefMap {
|
|||
Path { segments: path.segments[i..].to_vec(), kind: PathKind::Self_ };
|
||||
log::debug!("resolving {:?} in other crate", path);
|
||||
let defp_map = db.crate_def_map(module.krate);
|
||||
let (def, s) = defp_map.resolve_path(db, module.module_id, &path);
|
||||
let (def, s) = defp_map.resolve_path(db, module.local_id, &path);
|
||||
return ResolvePathResult::with(
|
||||
def,
|
||||
ReachedFixedPoint::Yes,
|
||||
|
@ -169,7 +169,7 @@ impl CrateDefMap {
|
|||
}
|
||||
|
||||
// Since it is a qualified path here, it should not contains legacy macros
|
||||
match self[module.module_id].scope.get(&segment.name) {
|
||||
match self[module.local_id].scope.get(&segment.name) {
|
||||
Some(res) => res.def,
|
||||
_ => {
|
||||
log::debug!("path segment {:?} not found", segment.name);
|
||||
|
@ -254,7 +254,7 @@ impl CrateDefMap {
|
|||
keep = db.crate_def_map(prelude.krate);
|
||||
&keep
|
||||
};
|
||||
def_map[prelude.module_id].scope.get(name).map_or_else(PerNs::none, |res| res.def)
|
||||
def_map[prelude.local_id].scope.get(name).map_or_else(PerNs::none, |res| res.def)
|
||||
} else {
|
||||
PerNs::none()
|
||||
}
|
||||
|
|
|
@ -176,7 +176,7 @@ pub(super) struct DefData {
|
|||
pub(super) enum DefKind {
|
||||
Function(FileAstId<ast::FnDef>),
|
||||
Struct(FileAstId<ast::StructDef>),
|
||||
Union(FileAstId<ast::StructDef>),
|
||||
Union(FileAstId<ast::UnionDef>),
|
||||
Enum(FileAstId<ast::EnumDef>),
|
||||
Const(FileAstId<ast::ConstDef>),
|
||||
Static(FileAstId<ast::StaticDef>),
|
||||
|
@ -246,11 +246,12 @@ impl RawItemsCollector {
|
|||
ast::ModuleItem::StructDef(it) => {
|
||||
let id = self.source_ast_id_map.ast_id(&it);
|
||||
let name = it.name();
|
||||
if it.is_union() {
|
||||
(DefKind::Union(id), name)
|
||||
} else {
|
||||
(DefKind::Struct(id), name)
|
||||
}
|
||||
(DefKind::Struct(id), name)
|
||||
}
|
||||
ast::ModuleItem::UnionDef(it) => {
|
||||
let id = self.source_ast_id_map.ast_id(&it);
|
||||
let name = it.name();
|
||||
(DefKind::Union(id), name)
|
||||
}
|
||||
ast::ModuleItem::EnumDef(it) => {
|
||||
(DefKind::Enum(self.source_ast_id_map.ast_id(&it)), it.name())
|
||||
|
|
|
@@ -82,6 +82,12 @@ fn crate_def_map_smoke_test() {

        //- /foo/bar.rs
        pub struct Baz;

        union U {
            to_be: bool,
            not_to_be: u8,
        }

        enum E { V }
        ",
    );

@@ -99,6 +105,7 @@ fn crate_def_map_smoke_test() {
   ⋮crate::foo::bar
   ⋮Baz: t v
   ⋮E: t
   ⋮U: t v
    "###)
}

|
|
|
@@ -97,7 +97,7 @@ impl Path {

    /// Converts an `ast::Path` to `Path`. Works with use trees.
    /// It correctly handles `$crate` based path from macro call.
    pub(crate) fn from_src(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path> {
    pub fn from_src(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path> {
        let mut kind = PathKind::Plain;
        let mut segments = Vec::new();
        loop {

|
@ -61,6 +61,8 @@ pub enum TypeNs {
|
|||
GenericParam(u32),
|
||||
AdtId(AdtId),
|
||||
AdtSelfType(AdtId),
|
||||
// Yup, enum variants are added to the types ns, but any usage of variant as
|
||||
// type is an error.
|
||||
EnumVariantId(EnumVariantId),
|
||||
TypeAliasId(TypeAliasId),
|
||||
BuiltinType(BuiltinType),
|
||||
|
@ -323,7 +325,7 @@ impl Resolver {
|
|||
if let Scope::ModuleScope(m) = scope {
|
||||
if let Some(prelude) = m.crate_def_map.prelude {
|
||||
let prelude_def_map = db.crate_def_map(prelude.krate);
|
||||
traits.extend(prelude_def_map[prelude.module_id].scope.traits());
|
||||
traits.extend(prelude_def_map[prelude.local_id].scope.traits());
|
||||
}
|
||||
traits.extend(m.crate_def_map[m.module_id].scope.traits());
|
||||
}
|
||||
|
@ -400,7 +402,7 @@ impl Scope {
|
|||
});
|
||||
if let Some(prelude) = m.crate_def_map.prelude {
|
||||
let prelude_def_map = db.crate_def_map(prelude.krate);
|
||||
prelude_def_map[prelude.module_id].scope.entries().for_each(|(name, res)| {
|
||||
prelude_def_map[prelude.local_id].scope.entries().for_each(|(name, res)| {
|
||||
f(name.clone(), ScopeDef::PerNs(res.def));
|
||||
});
|
||||
}
|
||||
|
@ -482,7 +484,7 @@ impl Resolver {
|
|||
}
|
||||
}
|
||||
|
||||
pub trait HasResolver {
|
||||
pub trait HasResolver: Copy {
|
||||
/// Builds a resolver for type references inside this def.
|
||||
fn resolver(self, db: &impl DefDatabase) -> Resolver;
|
||||
}
|
||||
|
@ -490,7 +492,7 @@ pub trait HasResolver {
|
|||
impl HasResolver for ModuleId {
|
||||
fn resolver(self, db: &impl DefDatabase) -> Resolver {
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
Resolver::default().push_module_scope(def_map, self.module_id)
|
||||
Resolver::default().push_module_scope(def_map, self.local_id)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -500,7 +502,7 @@ impl HasResolver for TraitId {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Into<AdtId>> HasResolver for T {
|
||||
impl<T: Into<AdtId> + Copy> HasResolver for T {
|
||||
fn resolver(self, db: &impl DefDatabase) -> Resolver {
|
||||
let def = self.into();
|
||||
def.module(db)
|
||||
|
|
|
@ -24,7 +24,9 @@ impl salsa::Database for TestDB {
|
|||
fn salsa_runtime(&self) -> &salsa::Runtime<Self> {
|
||||
&self.runtime
|
||||
}
|
||||
|
||||
fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
|
||||
&mut self.runtime
|
||||
}
|
||||
fn salsa_event(&self, event: impl Fn() -> salsa::Event<TestDB>) {
|
||||
let mut events = self.events.lock().unwrap();
|
||||
if let Some(events) = &mut *events {
|
||||
|
|
|
@@ -135,6 +135,16 @@ pub struct MacroDefId {
    pub kind: MacroDefKind,
}

impl MacroDefId {
    pub fn as_call_id(
        self,
        db: &dyn db::AstDatabase,
        ast_id: AstId<ast::MacroCall>,
    ) -> MacroCallId {
        db.intern_macro(MacroCallLoc { def: self, ast_id })
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroDefKind {
    Declarative,

@@ -143,8 +153,8 @@ pub enum MacroDefKind {

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
    pub def: MacroDefId,
    pub ast_id: AstId<ast::MacroCall>,
    pub(crate) def: MacroDefId,
    pub(crate) ast_id: AstId<ast::MacroCall>,
}

impl MacroCallId {

|
@ -23,6 +23,10 @@ impl salsa::Database for TestDB {
|
|||
&self.runtime
|
||||
}
|
||||
|
||||
fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
|
||||
&mut self.runtime
|
||||
}
|
||||
|
||||
fn salsa_event(&self, event: impl Fn() -> salsa::Event<TestDB>) {
|
||||
let mut events = self.events.lock().unwrap();
|
||||
if let Some(events) = &mut *events {
|
||||
|
|
crates/ra_hir_ty/Cargo.toml (new file, 32 lines)
@@ -0,0 +1,32 @@
[package]
edition = "2018"
name = "ra_hir_ty"
version = "0.1.0"
authors = ["rust-analyzer developers"]

[lib]
doctest = false

[dependencies]
arrayvec = "0.5.1"
ena = "0.13"
log = "0.4.5"
rustc-hash = "1.0"

hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
ra_arena = { path = "../ra_arena" }
ra_db = { path = "../ra_db" }
ra_prof = { path = "../ra_prof" }
ra_syntax = { path = "../ra_syntax" }
test_utils = { path = "../test_utils" }

# https://github.com/rust-lang/chalk/pull/294
chalk-solve = { git = "https://github.com/jackh726/chalk.git", rev = "095cd38a4f16337913bba487f2055b9ca0179f30" }
chalk-rust-ir = { git = "https://github.com/jackh726/chalk.git", rev = "095cd38a4f16337913bba487f2055b9ca0179f30" }
chalk-ir = { git = "https://github.com/jackh726/chalk.git", rev = "095cd38a4f16337913bba487f2055b9ca0179f30" }

lalrpop-intern = "0.15.1"

[dev-dependencies]
insta = "0.12.0"
|
@ -5,49 +5,56 @@
|
|||
|
||||
use std::iter::successors;
|
||||
|
||||
use hir_def::{lang_item::LangItemTarget, resolver::Resolver};
|
||||
use hir_def::lang_item::LangItemTarget;
|
||||
use hir_expand::name;
|
||||
use log::{info, warn};
|
||||
use ra_db::CrateId;
|
||||
|
||||
use crate::{db::HirDatabase, Trait};
|
||||
use crate::db::HirDatabase;
|
||||
|
||||
use super::{traits::Solution, Canonical, Substs, Ty, TypeWalk};
|
||||
use super::{
|
||||
traits::{InEnvironment, Solution},
|
||||
Canonical, Substs, Ty, TypeWalk,
|
||||
};
|
||||
|
||||
const AUTODEREF_RECURSION_LIMIT: usize = 10;
|
||||
|
||||
pub(crate) fn autoderef<'a>(
|
||||
pub fn autoderef<'a>(
|
||||
db: &'a impl HirDatabase,
|
||||
resolver: &'a Resolver,
|
||||
ty: Canonical<Ty>,
|
||||
krate: Option<CrateId>,
|
||||
ty: InEnvironment<Canonical<Ty>>,
|
||||
) -> impl Iterator<Item = Canonical<Ty>> + 'a {
|
||||
successors(Some(ty), move |ty| deref(db, resolver, ty)).take(AUTODEREF_RECURSION_LIMIT)
|
||||
let InEnvironment { value: ty, environment } = ty;
|
||||
successors(Some(ty), move |ty| {
|
||||
deref(db, krate?, InEnvironment { value: ty, environment: environment.clone() })
|
||||
})
|
||||
.take(AUTODEREF_RECURSION_LIMIT)
|
||||
}
|
||||
|
||||
pub(crate) fn deref(
|
||||
db: &impl HirDatabase,
|
||||
resolver: &Resolver,
|
||||
ty: &Canonical<Ty>,
|
||||
krate: CrateId,
|
||||
ty: InEnvironment<&Canonical<Ty>>,
|
||||
) -> Option<Canonical<Ty>> {
|
||||
if let Some(derefed) = ty.value.builtin_deref() {
|
||||
Some(Canonical { value: derefed, num_vars: ty.num_vars })
|
||||
if let Some(derefed) = ty.value.value.builtin_deref() {
|
||||
Some(Canonical { value: derefed, num_vars: ty.value.num_vars })
|
||||
} else {
|
||||
deref_by_trait(db, resolver, ty)
|
||||
deref_by_trait(db, krate, ty)
|
||||
}
|
||||
}
|
||||
|
||||
fn deref_by_trait(
|
||||
db: &impl HirDatabase,
|
||||
resolver: &Resolver,
|
||||
ty: &Canonical<Ty>,
|
||||
krate: CrateId,
|
||||
ty: InEnvironment<&Canonical<Ty>>,
|
||||
) -> Option<Canonical<Ty>> {
|
||||
let krate = resolver.krate()?;
|
||||
let deref_trait = match db.lang_item(krate.into(), "deref".into())? {
|
||||
LangItemTarget::TraitId(t) => Trait::from(t),
|
||||
LangItemTarget::TraitId(it) => it,
|
||||
_ => return None,
|
||||
};
|
||||
let target = deref_trait.associated_type_by_name(db, &name::TARGET_TYPE)?;
|
||||
let target = db.trait_data(deref_trait).associated_type_by_name(&name::TARGET_TYPE)?;
|
||||
|
||||
let generic_params = db.generic_params(target.id.into());
|
||||
let generic_params = db.generic_params(target.into());
|
||||
if generic_params.count_params_including_parent() != 1 {
|
||||
// the Target type + Deref trait should only have one generic parameter,
|
||||
// namely Deref's Self type
|
||||
|
@ -56,10 +63,8 @@ fn deref_by_trait(
|
|||
|
||||
// FIXME make the Canonical handling nicer
|
||||
|
||||
let env = super::lower::trait_env(db, resolver);
|
||||
|
||||
let parameters = Substs::build_for_generics(&generic_params)
|
||||
.push(ty.value.clone().shift_bound_vars(1))
|
||||
.push(ty.value.value.clone().shift_bound_vars(1))
|
||||
.build();
|
||||
|
||||
let projection = super::traits::ProjectionPredicate {
|
||||
|
@ -69,9 +74,9 @@ fn deref_by_trait(
|
|||
|
||||
let obligation = super::Obligation::Projection(projection);
|
||||
|
||||
let in_env = super::traits::InEnvironment { value: obligation, environment: env };
|
||||
let in_env = InEnvironment { value: obligation, environment: ty.environment };
|
||||
|
||||
let canonical = super::Canonical { num_vars: 1 + ty.num_vars, value: in_env };
|
||||
let canonical = super::Canonical { num_vars: 1 + ty.value.num_vars, value: in_env };
|
||||
|
||||
let solution = db.trait_solve(krate.into(), canonical)?;
|
||||
|
||||
|
@ -89,14 +94,14 @@ fn deref_by_trait(
|
|||
// the case.
|
||||
for i in 1..vars.0.num_vars {
|
||||
if vars.0.value[i] != Ty::Bound((i - 1) as u32) {
|
||||
warn!("complex solution for derefing {:?}: {:?}, ignoring", ty, solution);
|
||||
warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution);
|
||||
return None;
|
||||
}
|
||||
}
|
||||
Some(Canonical { value: vars.0.value[0].clone(), num_vars: vars.0.num_vars })
|
||||
}
|
||||
Solution::Ambig(_) => {
|
||||
info!("Ambiguous solution for derefing {:?}: {:?}", ty, solution);
|
||||
info!("Ambiguous solution for derefing {:?}: {:?}", ty.value, solution);
|
||||
None
|
||||
}
|
||||
}
|
crates/ra_hir_ty/src/db.rs (new file, 119 lines)
@ -0,0 +1,119 @@
|
|||
//! FIXME: write short doc here
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use hir_def::{
|
||||
db::DefDatabase, DefWithBodyId, GenericDefId, ImplId, LocalStructFieldId, TraitId, VariantId,
|
||||
};
|
||||
use ra_arena::map::ArenaMap;
|
||||
use ra_db::{salsa, CrateId};
|
||||
|
||||
use crate::{
|
||||
method_resolution::CrateImplBlocks,
|
||||
traits::{AssocTyValue, Impl},
|
||||
CallableDef, FnSig, GenericPredicate, ImplTy, InferenceResult, Substs, Ty, TyDefId, TypeCtor,
ValueTyDefId,
};

#[salsa::query_group(HirDatabaseStorage)]
#[salsa::requires(salsa::Database)]
pub trait HirDatabase: DefDatabase {
#[salsa::invoke(crate::infer_query)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;

#[salsa::invoke(crate::lower::ty_query)]
fn ty(&self, def: TyDefId) -> Ty;

#[salsa::invoke(crate::lower::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Ty;

#[salsa::invoke(crate::lower::impl_ty_query)]
fn impl_ty(&self, def: ImplId) -> ImplTy;

#[salsa::invoke(crate::lower::field_types_query)]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Ty>>;

#[salsa::invoke(crate::callable_item_sig)]
fn callable_item_signature(&self, def: CallableDef) -> FnSig;

#[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
fn generic_predicates_for_param(
&self,
def: GenericDefId,
param_idx: u32,
) -> Arc<[GenericPredicate]>;

#[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> Arc<[GenericPredicate]>;

#[salsa::invoke(crate::lower::generic_defaults_query)]
fn generic_defaults(&self, def: GenericDefId) -> Substs;

#[salsa::invoke(crate::method_resolution::CrateImplBlocks::impls_in_crate_query)]
fn impls_in_crate(&self, krate: CrateId) -> Arc<CrateImplBlocks>;

#[salsa::invoke(crate::traits::impls_for_trait_query)]
fn impls_for_trait(&self, krate: CrateId, trait_: TraitId) -> Arc<[ImplId]>;

/// This provides the Chalk trait solver instance. Because Chalk always
/// works from a specific crate, this query is keyed on the crate; and
/// because Chalk does its own internal caching, the solver is wrapped in a
/// Mutex and the query does an untracked read internally, to make sure the
/// cached state is thrown away when input facts change.
#[salsa::invoke(crate::traits::trait_solver_query)]
fn trait_solver(&self, krate: CrateId) -> crate::traits::TraitSolver;

// Interned IDs for Chalk integration
#[salsa::interned]
fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::TypeCtorId;
#[salsa::interned]
fn intern_chalk_impl(&self, impl_: Impl) -> crate::traits::GlobalImplId;
#[salsa::interned]
fn intern_assoc_ty_value(&self, assoc_ty_value: AssocTyValue) -> crate::traits::AssocTyValueId;

#[salsa::invoke(crate::traits::chalk::associated_ty_data_query)]
fn associated_ty_data(
&self,
id: chalk_ir::TypeId,
) -> Arc<chalk_rust_ir::AssociatedTyDatum<chalk_ir::family::ChalkIr>>;

#[salsa::invoke(crate::traits::chalk::trait_datum_query)]
fn trait_datum(
&self,
krate: CrateId,
trait_id: chalk_ir::TraitId,
) -> Arc<chalk_rust_ir::TraitDatum<chalk_ir::family::ChalkIr>>;

#[salsa::invoke(crate::traits::chalk::struct_datum_query)]
fn struct_datum(
&self,
krate: CrateId,
struct_id: chalk_ir::StructId,
) -> Arc<chalk_rust_ir::StructDatum<chalk_ir::family::ChalkIr>>;

#[salsa::invoke(crate::traits::chalk::impl_datum_query)]
fn impl_datum(
&self,
krate: CrateId,
impl_id: chalk_ir::ImplId,
) -> Arc<chalk_rust_ir::ImplDatum<chalk_ir::family::ChalkIr>>;

#[salsa::invoke(crate::traits::chalk::associated_ty_value_query)]
fn associated_ty_value(
&self,
krate: CrateId,
id: chalk_rust_ir::AssociatedTyValueId,
) -> Arc<chalk_rust_ir::AssociatedTyValue<chalk_ir::family::ChalkIr>>;

#[salsa::invoke(crate::traits::trait_solve_query)]
fn trait_solve(
&self,
krate: CrateId,
goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>,
) -> Option<crate::traits::Solution>;
}

#[test]
fn hir_database_is_object_safe() {
fn _assert_object_safe(_: &dyn HirDatabase) {}
}
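The query group above is what the rest of the type-checking code programs against. As a rough illustration (not part of this change), a caller holding any database type that implements HirDatabase can invoke the queries as plain methods and rely on salsa's memoization:

// Illustrative sketch only: `db` is any type implementing the HirDatabase
// trait declared above; the query methods are the ones from that trait.
fn show_inference(db: &impl HirDatabase, def: DefWithBodyId) {
    // `infer` is a salsa query: the first call computes and caches the
    // result; later calls with unchanged inputs return the cached Arc.
    let result: std::sync::Arc<InferenceResult> = db.infer(def);
    let _cached_again = db.infer(def);
    drop(result);
}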
91
crates/ra_hir_ty/src/diagnostics.rs
Normal file
@@ -0,0 +1,91 @@
//! FIXME: write short doc here

use std::any::Any;

use hir_expand::{db::AstDatabase, name::Name, HirFileId, Source};
use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};

pub use hir_def::diagnostics::UnresolvedModule;
pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};

#[derive(Debug)]
pub struct NoSuchField {
pub file: HirFileId,
pub field: AstPtr<ast::RecordField>,
}

impl Diagnostic for NoSuchField {
fn message(&self) -> String {
"no such field".to_string()
}

fn source(&self) -> Source<SyntaxNodePtr> {
Source { file_id: self.file, value: self.field.into() }
}

fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}

#[derive(Debug)]
pub struct MissingFields {
pub file: HirFileId,
pub field_list: AstPtr<ast::RecordFieldList>,
pub missed_fields: Vec<Name>,
}

impl Diagnostic for MissingFields {
fn message(&self) -> String {
use std::fmt::Write;
let mut message = String::from("Missing structure fields:\n");
for field in &self.missed_fields {
write!(message, "- {}\n", field).unwrap();
}
message
}
fn source(&self) -> Source<SyntaxNodePtr> {
Source { file_id: self.file, value: self.field_list.into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}

impl AstDiagnostic for MissingFields {
type AST = ast::RecordFieldList;

fn ast(&self, db: &impl AstDatabase) -> Self::AST {
let root = db.parse_or_expand(self.source().file_id).unwrap();
let node = self.source().value.to_node(&root);
ast::RecordFieldList::cast(node).unwrap()
}
}

#[derive(Debug)]
pub struct MissingOkInTailExpr {
pub file: HirFileId,
pub expr: AstPtr<ast::Expr>,
}

impl Diagnostic for MissingOkInTailExpr {
fn message(&self) -> String {
"wrap return expression in Ok".to_string()
}
fn source(&self) -> Source<SyntaxNodePtr> {
Source { file_id: self.file, value: self.expr.into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}

impl AstDiagnostic for MissingOkInTailExpr {
type AST = ast::Expr;

fn ast(&self, db: &impl AstDatabase) -> Self::AST {
let root = db.parse_or_expand(self.file).unwrap();
let node = self.source().value.to_node(&root);
ast::Expr::cast(node).unwrap()
}
}
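To show the pattern these types follow, here is a hypothetical additional diagnostic written the same way; MissingSemicolon is illustrative only and is not part of this change:

#[derive(Debug)]
pub struct MissingSemicolon {
    pub file: HirFileId,
    pub expr: AstPtr<ast::Expr>,
}

impl Diagnostic for MissingSemicolon {
    fn message(&self) -> String {
        // Short, user-facing text, as in the diagnostics above.
        "consider adding a semicolon".to_string()
    }
    fn source(&self) -> Source<SyntaxNodePtr> {
        // Point at the offending syntax node, keyed by the originating file.
        Source { file_id: self.file, value: self.expr.into() }
    }
    fn as_any(&self) -> &(dyn Any + Send + 'static) {
        self
    }
}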
@@ -2,8 +2,12 @@

use std::sync::Arc;

use hir_def::{path::known, resolver::HasResolver};
use hir_expand::diagnostics::DiagnosticSink;
use hir_def::{
path::{known, Path},
resolver::HasResolver,
AdtId, FunctionId,
};
use hir_expand::{diagnostics::DiagnosticSink, name::Name};
use ra_syntax::ast;
use ra_syntax::AstPtr;
use rustc_hash::FxHashSet;

@@ -11,8 +15,7 @@ use rustc_hash::FxHashSet;
use crate::{
db::HirDatabase,
diagnostics::{MissingFields, MissingOkInTailExpr},
ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
Adt, Function, Name, Path,
ApplicationTy, InferenceResult, Ty, TypeCtor,
};

pub use hir_def::{

@@ -26,23 +29,23 @@ pub use hir_def::{
},
};

pub(crate) struct ExprValidator<'a, 'b: 'a> {
func: Function,
pub struct ExprValidator<'a, 'b: 'a> {
func: FunctionId,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
}

impl<'a, 'b> ExprValidator<'a, 'b> {
pub(crate) fn new(
func: Function,
pub fn new(
func: FunctionId,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
) -> ExprValidator<'a, 'b> {
ExprValidator { func, infer, sink }
}

pub(crate) fn validate_body(&mut self, db: &impl HirDatabase) {
let body = self.func.body(db);
pub fn validate_body(&mut self, db: &impl HirDatabase) {
let body = db.body(self.func.into());

for e in body.exprs.iter() {
if let (id, Expr::RecordLit { path, fields, spread }) = e {

@@ -69,16 +72,18 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
}

let struct_def = match self.infer[id].as_adt() {
Some((Adt::Struct(s), _)) => s,
Some((AdtId::StructId(s), _)) => s,
_ => return,
};
let struct_data = db.struct_data(struct_def);

let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<Name> = struct_def
.fields(db)
let missed_fields: Vec<Name> = struct_data
.variant_data
.fields()
.iter()
.filter_map(|f| {
let name = f.name(db);
.filter_map(|(_f, d)| {
let name = d.name.clone();
if lit_fields.contains(&name) {
None
} else {

@@ -89,7 +94,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
if missed_fields.is_empty() {
return;
}
let source_map = self.func.body_source_map(db);
let (_, source_map) = db.body_with_source_map(self.func.into());

if let Some(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.a() {

@@ -121,20 +126,20 @@ impl<'a, 'b> ExprValidator<'a, 'b> {

let std_result_path = known::std_result_result();

let resolver = self.func.id.resolver(db);
let resolver = self.func.resolver(db);
let std_result_enum = match resolver.resolve_known_enum(db, &std_result_path) {
Some(it) => it,
_ => return,
};

let std_result_ctor = TypeCtor::Adt(Adt::Enum(std_result_enum.into()));
let std_result_ctor = TypeCtor::Adt(AdtId::EnumId(std_result_enum));
let params = match &mismatch.expected {
Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &std_result_ctor => parameters,
_ => return,
};

if params.len() == 2 && &params[0] == &mismatch.actual {
let source_map = self.func.body_source_map(db);
let (_, source_map) = db.body_with_source_map(self.func.into());

if let Some(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.a() {
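Putting the pieces together, a caller might drive the validator roughly like this (a sketch under the new ID-based API; the surrounding wiring and the helper name are assumed, not shown in this diff):

// Sketch: run body validation for one function and collect diagnostics.
fn validate_one_function(
    db: &impl HirDatabase,
    func: FunctionId,
    sink: &mut DiagnosticSink<'_>,
) {
    // The infer query returns Arc<InferenceResult> for the function's body.
    let infer = db.infer(func.into());
    let mut validator = ExprValidator::new(func, infer, sink);
    // Pushes MissingFields / MissingOkInTailExpr into the sink as needed.
    validator.validate_body(db);
}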
@@ -22,11 +22,13 @@ use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
use rustc_hash::FxHashMap;

use hir_def::{
body::Body,
data::{ConstData, FunctionData},
path::known,
expr::{BindingAnnotation, ExprId, PatId},
path::{known, Path},
resolver::{HasResolver, Resolver, TypeNs},
type_ref::{Mutability, TypeRef},
AdtId, DefWithBodyId,
AdtId, AssocItemId, DefWithBodyId, FunctionId, StructFieldId, TypeAliasId, VariantId,
};
use hir_expand::{diagnostics::DiagnosticSink, name};
use ra_arena::map::ArenaMap;

@@ -34,22 +36,16 @@ use ra_prof::profile;
use test_utils::tested_by;

use super::{
lower,
primitive::{FloatTy, IntTy},
traits::{Guidance, Obligation, ProjectionPredicate, Solution},
ApplicationTy, InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypableDef,
TypeCtor, TypeWalk, Uncertain,
};
use crate::{
code_model::TypeAlias,
db::HirDatabase,
expr::{BindingAnnotation, Body, ExprId, PatId},
ty::infer::diagnostics::InferenceDiagnostic,
Adt, AssocItem, DefWithBody, FloatTy, Function, IntTy, Path, StructField, Trait, VariantDef,
ApplicationTy, InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor,
TypeWalk, Uncertain,
};
use crate::{db::HirDatabase, infer::diagnostics::InferenceDiagnostic};

macro_rules! ty_app {
($ctor:pat, $param:pat) => {
crate::ty::Ty::Apply(crate::ty::ApplicationTy { ctor: $ctor, parameters: $param })
crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param })
};
($ctor:pat) => {
ty_app!($ctor, _)

@@ -63,15 +59,15 @@ mod pat;
mod coerce;

/// The entry point of type inference.
pub fn infer_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> {
pub fn infer_query(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = profile("infer_query");
let resolver = DefWithBodyId::from(def).resolver(db);
let resolver = def.resolver(db);
let mut ctx = InferenceContext::new(db, def, resolver);

match &def {
DefWithBody::Const(c) => ctx.collect_const(&db.const_data(c.id)),
DefWithBody::Function(f) => ctx.collect_fn(&db.function_data(f.id)),
DefWithBody::Static(s) => ctx.collect_const(&db.static_data(s.id)),
match def {
DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
DefWithBodyId::FunctionId(f) => ctx.collect_fn(&db.function_data(f)),
DefWithBodyId::StaticId(s) => ctx.collect_const(&db.static_data(s)),
}

ctx.infer_body();

@@ -122,50 +118,50 @@ pub struct TypeMismatch {
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct InferenceResult {
/// For each method call expr, records the function it resolves to.
method_resolutions: FxHashMap<ExprId, Function>,
method_resolutions: FxHashMap<ExprId, FunctionId>,
/// For each field access expr, records the field it resolves to.
field_resolutions: FxHashMap<ExprId, StructField>,
field_resolutions: FxHashMap<ExprId, StructFieldId>,
/// For each field in record literal, records the field it resolves to.
record_field_resolutions: FxHashMap<ExprId, StructField>,
record_field_resolutions: FxHashMap<ExprId, StructFieldId>,
/// For each struct literal, records the variant it resolves to.
variant_resolutions: FxHashMap<ExprOrPatId, VariantDef>,
variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
/// For each associated item record what it resolves to
assoc_resolutions: FxHashMap<ExprOrPatId, AssocItem>,
assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>,
diagnostics: Vec<InferenceDiagnostic>,
pub(super) type_of_expr: ArenaMap<ExprId, Ty>,
pub(super) type_of_pat: ArenaMap<PatId, Ty>,
pub type_of_expr: ArenaMap<ExprId, Ty>,
pub type_of_pat: ArenaMap<PatId, Ty>,
pub(super) type_mismatches: ArenaMap<ExprId, TypeMismatch>,
}

impl InferenceResult {
pub fn method_resolution(&self, expr: ExprId) -> Option<Function> {
pub fn method_resolution(&self, expr: ExprId) -> Option<FunctionId> {
self.method_resolutions.get(&expr).copied()
}
pub fn field_resolution(&self, expr: ExprId) -> Option<StructField> {
pub fn field_resolution(&self, expr: ExprId) -> Option<StructFieldId> {
self.field_resolutions.get(&expr).copied()
}
pub fn record_field_resolution(&self, expr: ExprId) -> Option<StructField> {
pub fn record_field_resolution(&self, expr: ExprId) -> Option<StructFieldId> {
self.record_field_resolutions.get(&expr).copied()
}
pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantDef> {
pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
self.variant_resolutions.get(&id.into()).copied()
}
pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantDef> {
pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantId> {
self.variant_resolutions.get(&id.into()).copied()
}
pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItem> {
pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItemId> {
self.assoc_resolutions.get(&id.into()).copied()
}
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItem> {
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItemId> {
self.assoc_resolutions.get(&id.into()).copied()
}
pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
self.type_mismatches.get(expr)
}
pub(crate) fn add_diagnostics(
pub fn add_diagnostics(
&self,
db: &impl HirDatabase,
owner: Function,
owner: FunctionId,
sink: &mut DiagnosticSink,
) {
self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink))
@@ -192,7 +188,7 @@ impl Index<PatId> for InferenceResult {
#[derive(Clone, Debug)]
struct InferenceContext<'a, D: HirDatabase> {
db: &'a D,
owner: DefWithBody,
owner: DefWithBodyId,
body: Arc<Body>,
resolver: Resolver,
var_unification_table: InPlaceUnificationTable<TypeVarId>,

@@ -210,13 +206,13 @@ struct InferenceContext<'a, D: HirDatabase> {
}

impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn new(db: &'a D, owner: DefWithBody, resolver: Resolver) -> Self {
fn new(db: &'a D, owner: DefWithBodyId, resolver: Resolver) -> Self {
InferenceContext {
result: InferenceResult::default(),
var_unification_table: InPlaceUnificationTable::new(),
obligations: Vec::default(),
return_ty: Ty::Unknown, // set in collect_fn_signature
trait_env: lower::trait_env(db, &resolver),
trait_env: TraitEnvironment::lower(db, &resolver),
coerce_unsized_map: Self::init_coerce_unsized_map(db, &resolver),
db,
owner,

@@ -244,20 +240,20 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.result.type_of_expr.insert(expr, ty);
}

fn write_method_resolution(&mut self, expr: ExprId, func: Function) {
fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId) {
self.result.method_resolutions.insert(expr, func);
}

fn write_field_resolution(&mut self, expr: ExprId, field: StructField) {
fn write_field_resolution(&mut self, expr: ExprId, field: StructFieldId) {
self.result.field_resolutions.insert(expr, field);
}

fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantDef) {
fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
self.result.variant_resolutions.insert(id, variant);
}

fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItem) {
self.result.assoc_resolutions.insert(id, item);
fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) {
self.result.assoc_resolutions.insert(id, item.into());
}

fn write_pat_ty(&mut self, pat: PatId, ty: Ty) {

@@ -515,51 +511,28 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
})
}

fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantDef>) {
fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantId>) {
let path = match path {
Some(path) => path,
None => return (Ty::Unknown, None),
};
let resolver = &self.resolver;
let def: TypableDef =
// FIXME: this should resolve assoc items as well, see this example:
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
match resolver.resolve_path_in_type_ns_fully(self.db, &path) {
Some(TypeNs::AdtId(AdtId::StructId(it))) => it.into(),
Some(TypeNs::AdtId(AdtId::UnionId(it))) => it.into(),
Some(TypeNs::AdtSelfType(adt)) => adt.into(),
Some(TypeNs::EnumVariantId(it)) => it.into(),
Some(TypeNs::TypeAliasId(it)) => it.into(),

Some(TypeNs::SelfType(_)) |
Some(TypeNs::GenericParam(_)) |
Some(TypeNs::BuiltinType(_)) |
Some(TypeNs::TraitId(_)) |
Some(TypeNs::AdtId(AdtId::EnumId(_))) |
None => {
return (Ty::Unknown, None)
}
};
// FIXME remove the duplication between here and `Ty::from_path`?
let substs = Ty::substs_from_path(self.db, resolver, path, def);
match def {
TypableDef::Adt(Adt::Struct(s)) => {
let ty = s.ty(self.db);
// FIXME: this should resolve assoc items as well, see this example:
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
match resolver.resolve_path_in_type_ns_fully(self.db, &path) {
Some(TypeNs::AdtId(AdtId::StructId(strukt))) => {
let substs = Ty::substs_from_path(self.db, resolver, path, strukt.into());
let ty = self.db.ty(strukt.into());
let ty = self.insert_type_vars(ty.apply_substs(substs));
(ty, Some(s.into()))
(ty, Some(strukt.into()))
}
TypableDef::EnumVariant(var) => {
let ty = var.parent_enum(self.db).ty(self.db);
Some(TypeNs::EnumVariantId(var)) => {
let substs = Ty::substs_from_path(self.db, resolver, path, var.into());
let ty = self.db.ty(var.parent.into());
let ty = self.insert_type_vars(ty.apply_substs(substs));
(ty, Some(var.into()))
}
TypableDef::Adt(Adt::Enum(_))
| TypableDef::Adt(Adt::Union(_))
| TypableDef::TypeAlias(_)
| TypableDef::Function(_)
| TypableDef::Const(_)
| TypableDef::Static(_)
| TypableDef::BuiltinType(_) => (Ty::Unknown, None),
Some(_) | None => (Ty::Unknown, None),
}
}

@@ -581,28 +554,28 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.infer_expr(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
}

fn resolve_into_iter_item(&self) -> Option<TypeAlias> {
fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
let path = known::std_iter_into_iterator();
let trait_: Trait = self.resolver.resolve_known_trait(self.db, &path)?.into();
trait_.associated_type_by_name(self.db, &name::ITEM_TYPE)
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name::ITEM_TYPE)
}

fn resolve_ops_try_ok(&self) -> Option<TypeAlias> {
fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
let path = known::std_ops_try();
let trait_: Trait = self.resolver.resolve_known_trait(self.db, &path)?.into();
trait_.associated_type_by_name(self.db, &name::OK_TYPE)
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name::OK_TYPE)
}

fn resolve_future_future_output(&self) -> Option<TypeAlias> {
fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
let path = known::std_future_future();
let trait_: Trait = self.resolver.resolve_known_trait(self.db, &path)?.into();
trait_.associated_type_by_name(self.db, &name::OUTPUT_TYPE)
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name::OUTPUT_TYPE)
}

fn resolve_boxed_box(&self) -> Option<Adt> {
fn resolve_boxed_box(&self) -> Option<AdtId> {
let path = known::std_boxed_box();
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?;
Some(Adt::Struct(struct_.into()))
Some(struct_.into())
}
}

@@ -720,9 +693,10 @@ impl Expectation {
}

mod diagnostics {
use hir_def::{expr::ExprId, FunctionId, HasSource, Lookup};
use hir_expand::diagnostics::DiagnosticSink;

use crate::{db::HirDatabase, diagnostics::NoSuchField, expr::ExprId, Function, HasSource};
use crate::{db::HirDatabase, diagnostics::NoSuchField};

#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum InferenceDiagnostic {

@@ -733,13 +707,14 @@ mod diagnostics {
pub(super) fn add_to(
&self,
db: &impl HirDatabase,
owner: Function,
owner: FunctionId,
sink: &mut DiagnosticSink,
) {
match self {
InferenceDiagnostic::NoSuchField { expr, field } => {
let file = owner.source(db).file_id;
let field = owner.body_source_map(db).field_syntax(*expr, *field);
let file = owner.lookup(db).source(db).file_id;
let (_, source_map) = db.body_with_source_map(owner.into());
let field = source_map.field_syntax(*expr, *field);
sink.push(NoSuchField { file, field })
}
}
@@ -4,17 +4,13 @@
//!
//! See: https://doc.rust-lang.org/nomicon/coercions.html

use hir_def::{lang_item::LangItemTarget, resolver::Resolver};
use hir_def::{lang_item::LangItemTarget, resolver::Resolver, type_ref::Mutability, AdtId};
use rustc_hash::FxHashMap;
use test_utils::tested_by;

use crate::{
db::HirDatabase,
ty::{autoderef, Substs, Ty, TypeCtor, TypeWalk},
Adt, Mutability,
};
use crate::{autoderef, db::HirDatabase, ImplTy, Substs, Ty, TypeCtor, TypeWalk};

use super::{InferTy, InferenceContext, TypeVarValue};
use super::{InEnvironment, InferTy, InferenceContext, TypeVarValue};

impl<'a, D: HirDatabase> InferenceContext<'a, D> {
/// Unify two types, but may coerce the first one to the second one

@@ -57,9 +53,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {

impls
.iter()
.filter_map(|impl_block| {
.filter_map(|&impl_id| {
let trait_ref = match db.impl_ty(impl_id) {
ImplTy::TraitRef(it) => it,
ImplTy::Inherent(_) => return None,
};

// `CoerceUnsized` has one generic parameter for the target type.
let trait_ref = impl_block.target_trait_ref(db)?;
let cur_from_ty = trait_ref.substs.0.get(0)?;
let cur_to_ty = trait_ref.substs.0.get(1)?;

@@ -242,11 +242,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// - T is not part of the type of any other fields
// - Bar<T>: Unsize<Bar<U>>, if the last field of Foo has type Bar<T>
(
ty_app!(TypeCtor::Adt(Adt::Struct(struct1)), st1),
ty_app!(TypeCtor::Adt(Adt::Struct(struct2)), st2),
ty_app!(TypeCtor::Adt(AdtId::StructId(struct1)), st1),
ty_app!(TypeCtor::Adt(AdtId::StructId(struct2)), st2),
) if struct1 == struct2 => {
let field_tys = self.db.field_types(struct1.id.into());
let struct_data = self.db.struct_data(struct1.id.0);
let field_tys = self.db.field_types((*struct1).into());
let struct_data = self.db.struct_data(*struct1);

let mut fields = struct_data.variant_data.fields().iter();
let (last_field_id, _data) = fields.next_back()?;

@@ -320,9 +320,14 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let canonicalized = self.canonicalizer().canonicalize_ty(from_ty.clone());
let to_ty = self.resolve_ty_shallow(&to_ty);
// FIXME: Auto DerefMut
for derefed_ty in
autoderef::autoderef(self.db, &self.resolver.clone(), canonicalized.value.clone())
{
for derefed_ty in autoderef::autoderef(
self.db,
self.resolver.krate(),
InEnvironment {
value: canonicalized.value.clone(),
environment: self.trait_env.clone(),
},
) {
let derefed_ty = canonicalized.decanonicalize_ty(derefed_ty.value);
match (&*self.resolve_ty_shallow(&derefed_ty), &*to_ty) {
// Stop when constructor matches.
@@ -5,21 +5,18 @@ use std::sync::Arc;

use hir_def::{
builtin_type::Signedness,
expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
generics::GenericParams,
path::{GenericArg, GenericArgs},
resolver::resolver_for_expr,
AdtId, ContainerId, Lookup, StructFieldId,
};
use hir_expand::name;
use hir_expand::name::{self, Name};

use crate::{
db::HirDatabase,
expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
ty::{
autoderef, method_resolution, op, CallableDef, InferTy, IntTy, Mutability, Namespace,
Obligation, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk,
Uncertain,
},
Adt, Name,
autoderef, db::HirDatabase, method_resolution, op, traits::InEnvironment, utils::variant_data,
CallableDef, InferTy, IntTy, Mutability, Obligation, ProjectionPredicate, ProjectionTy, Substs,
TraitRef, Ty, TypeCtor, TypeWalk, Uncertain,
};

use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch};

@@ -136,8 +133,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1 },
Substs(sig_tys.into()),
);
let closure_ty =
Ty::apply_one(TypeCtor::Closure { def: self.owner, expr: tgt_expr }, sig_ty);
let closure_ty = Ty::apply_one(
TypeCtor::Closure { def: self.owner.into(), expr: tgt_expr },
sig_ty,
);

// Eagerly try to relate the closure type with the expected
// type, otherwise we often won't have enough information to

@@ -216,22 +215,26 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let substs = ty.substs().unwrap_or_else(Substs::empty);
let field_types =
def_id.map(|it| self.db.field_types(it.into())).unwrap_or_default();
let variant_data = def_id.map(|it| variant_data(self.db, it));
for (field_idx, field) in fields.iter().enumerate() {
let field_def = def_id.and_then(|it| match it.field(self.db, &field.name) {
Some(field) => Some(field),
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
expr: tgt_expr,
field: field_idx,
});
None
}
});
let field_def =
variant_data.as_ref().and_then(|it| match it.field(&field.name) {
Some(local_id) => {
Some(StructFieldId { parent: def_id.unwrap(), local_id })
}
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
expr: tgt_expr,
field: field_idx,
});
None
}
});
if let Some(field_def) = field_def {
self.result.record_field_resolutions.insert(field.expr, field_def);
}
let field_ty = field_def
.map_or(Ty::Unknown, |it| field_types[it.id].clone())
.map_or(Ty::Unknown, |it| field_types[it.local_id].clone())
.subst(&substs);
self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
}

@@ -245,20 +248,28 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty);
let ty = autoderef::autoderef(
self.db,
&self.resolver.clone(),
canonicalized.value.clone(),
self.resolver.krate(),
InEnvironment {
value: canonicalized.value.clone(),
environment: self.trait_env.clone(),
},
)
.find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) {
Ty::Apply(a_ty) => match a_ty.ctor {
TypeCtor::Tuple { .. } => name
.as_tuple_index()
.and_then(|idx| a_ty.parameters.0.get(idx).cloned()),
TypeCtor::Adt(Adt::Struct(s)) => s.field(self.db, name).map(|field| {
self.write_field_resolution(tgt_expr, field);
self.db.field_types(s.id.into())[field.id]
.clone()
.subst(&a_ty.parameters)
}),
TypeCtor::Adt(AdtId::StructId(s)) => {
self.db.struct_data(s).variant_data.field(name).map(|local_id| {
let field = StructFieldId { parent: s.into(), local_id }.into();
self.write_field_resolution(tgt_expr, field);
self.db.field_types(s.into())[field.local_id]
.clone()
.subst(&a_ty.parameters)
})
}
// FIXME:
TypeCtor::Adt(AdtId::UnionId(_)) => None,
_ => None,
},
_ => None,

@@ -337,16 +348,25 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Expr::UnaryOp { expr, op } => {
let inner_ty = self.infer_expr(*expr, &Expectation::none());
match op {
UnaryOp::Deref => {
let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty);
if let Some(derefed_ty) =
autoderef::deref(self.db, &self.resolver, &canonicalized.value)
{
canonicalized.decanonicalize_ty(derefed_ty.value)
} else {
Ty::Unknown
UnaryOp::Deref => match self.resolver.krate() {
Some(krate) => {
let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty);
match autoderef::deref(
self.db,
krate,
InEnvironment {
value: &canonicalized.value,
environment: self.trait_env.clone(),
},
) {
Some(derefed_ty) => {
canonicalized.decanonicalize_ty(derefed_ty.value)
}
None => Ty::Unknown,
}
}
}
None => Ty::Unknown,
},
UnaryOp::Neg => {
match &inner_ty {
Ty::Apply(a_ty) => match a_ty.ctor {

@@ -538,11 +558,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Some((ty, func)) => {
let ty = canonicalized_receiver.decanonicalize_ty(ty);
self.write_method_resolution(tgt_expr, func);
(
ty,
self.db.type_for_def(func.into(), Namespace::Values),
Some(self.db.generic_params(func.id.into())),
)
(ty, self.db.value_ty(func.into()), Some(self.db.generic_params(func.into())))
}
None => (receiver_ty, Ty::Unknown, None),
};

@@ -648,18 +664,21 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}
// add obligation for trait implementation, if this is a trait method
match def {
CallableDef::Function(f) => {
if let Some(trait_) = f.parent_trait(self.db) {
CallableDef::FunctionId(f) => {
if let ContainerId::TraitId(trait_) = f.lookup(self.db).container {
// construct a TraitDef
let substs = a_ty.parameters.prefix(
self.db
.generic_params(trait_.id.into())
.generic_params(trait_.into())
.count_params_including_parent(),
);
self.obligations.push(Obligation::Trait(TraitRef { trait_, substs }));
self.obligations.push(Obligation::Trait(TraitRef {
trait_: trait_.into(),
substs,
}));
}
}
CallableDef::Struct(_) | CallableDef::EnumVariant(_) => {}
CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {}
}
}
}
@@ -3,15 +3,16 @@
use std::iter::repeat;
use std::sync::Arc;

use hir_def::{
expr::{BindingAnnotation, Pat, PatId, RecordFieldPat},
path::Path,
type_ref::Mutability,
};
use hir_expand::name::Name;
use test_utils::tested_by;

use super::{BindingMode, InferenceContext};
use crate::{
db::HirDatabase,
expr::{BindingAnnotation, Pat, PatId, RecordFieldPat},
ty::{Mutability, Substs, Ty, TypeCtor, TypeWalk},
Name, Path,
};
use crate::{db::HirDatabase, utils::variant_data, Substs, Ty, TypeCtor, TypeWalk};

impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn infer_tuple_struct_pat(

@@ -22,16 +23,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
default_bm: BindingMode,
) -> Ty {
let (ty, def) = self.resolve_variant(path);

let var_data = def.map(|it| variant_data(self.db, it));
self.unify(&ty, expected);

let substs = ty.substs().unwrap_or_else(Substs::empty);

let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default();

for (i, &subpat) in subpats.iter().enumerate() {
let expected_ty = def
.and_then(|d| d.field(self.db, &Name::new_tuple_field(i)))
.map_or(Ty::Unknown, |field| field_tys[field.id].clone())
let expected_ty = var_data
.as_ref()
.and_then(|d| d.field(&Name::new_tuple_field(i)))
.map_or(Ty::Unknown, |field| field_tys[field].clone())
.subst(&substs);
let expected_ty = self.normalize_associated_types_in(expected_ty);
self.infer_pat(subpat, &expected_ty, default_bm);

@@ -49,6 +52,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
id: PatId,
) -> Ty {
let (ty, def) = self.resolve_variant(path);
let var_data = def.map(|it| variant_data(self.db, it));
if let Some(variant) = def {
self.write_variant_resolution(id.into(), variant);
}

@@ -59,10 +63,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {

let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default();
for subpat in subpats {
let matching_field = def.and_then(|it| it.field(self.db, &subpat.name));
let expected_ty = matching_field
.map_or(Ty::Unknown, |field| field_tys[field.id].clone())
.subst(&substs);
let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
let expected_ty =
matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone()).subst(&substs);
let expected_ty = self.normalize_associated_types_in(expected_ty);
self.infer_pat(subpat.pat, &expected_ty, default_bm);
}
@@ -1,15 +1,13 @@
//! Path expression resolution.

use hir_def::{
path::PathSegment,
path::{Path, PathKind, PathSegment},
resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
AssocItemId, ContainerId, Lookup,
};
use hir_expand::name::Name;

use crate::{
db::HirDatabase,
ty::{method_resolution, Namespace, Substs, Ty, TypableDef, TypeWalk},
AssocItem, Container, Function, Name, Path,
};
use crate::{db::HirDatabase, method_resolution, Substs, Ty, TypeWalk, ValueTyDefId};

use super::{ExprOrPatId, InferenceContext, TraitRef};

@@ -32,7 +30,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
path: &Path,
id: ExprOrPatId,
) -> Option<Ty> {
let (value, self_subst) = if let crate::PathKind::Type(type_ref) = &path.kind {
let (value, self_subst) = if let PathKind::Type(type_ref) = &path.kind {
if path.segments.is_empty() {
// This can't actually happen syntax-wise
return None;

@@ -56,7 +54,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}
};

let typable: TypableDef = match value {
let typable: ValueTyDefId = match value {
ValueNs::LocalBinding(pat) => {
let ty = self.result.type_of_pat.get(pat)?.clone();
let ty = self.resolve_ty_as_possible(&mut vec![], ty);

@@ -69,11 +67,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
ValueNs::EnumVariantId(it) => it.into(),
};

let mut ty = self.db.type_for_def(typable, Namespace::Values);
let mut ty = self.db.value_ty(typable);
if let Some(self_subst) = self_subst {
ty = ty.subst(&self_subst);
}

let substs = Ty::substs_from_path(self.db, &self.resolver, path, typable);
let ty = ty.subst(&substs);
Some(ty)

@@ -143,28 +140,35 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
id: ExprOrPatId,
) -> Option<(ValueNs, Option<Substs>)> {
let trait_ = trait_ref.trait_;
let item = trait_.items(self.db).iter().copied().find_map(|item| match item {
AssocItem::Function(func) => {
if segment.name == func.name(self.db) {
Some(AssocItem::Function(func))
} else {
None
let item = self
.db
.trait_data(trait_)
.items
.iter()
.map(|(_name, id)| (*id).into())
.find_map(|item| match item {
AssocItemId::FunctionId(func) => {
if segment.name == self.db.function_data(func).name {
Some(AssocItemId::FunctionId(func))
} else {
None
}
}
}

AssocItem::Const(konst) => {
if konst.name(self.db).map_or(false, |n| n == segment.name) {
Some(AssocItem::Const(konst))
} else {
None
AssocItemId::ConstId(konst) => {
if self.db.const_data(konst).name.as_ref().map_or(false, |n| n == &segment.name)
{
Some(AssocItemId::ConstId(konst))
} else {
None
}
}
}
AssocItem::TypeAlias(_) => None,
})?;
AssocItemId::TypeAliasId(_) => None,
})?;
let def = match item {
AssocItem::Function(f) => ValueNs::FunctionId(f.id),
AssocItem::Const(c) => ValueNs::ConstId(c.id),
AssocItem::TypeAlias(_) => unreachable!(),
AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
AssocItemId::ConstId(c) => ValueNs::ConstId(c),
AssocItemId::TypeAliasId(_) => unreachable!(),
};
let substs = Substs::build_for_def(self.db, item)
.use_parent_substs(&trait_ref.substs)

@@ -194,16 +198,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Some(name),
method_resolution::LookupMode::Path,
move |_ty, item| {
let def = match item {
AssocItem::Function(f) => ValueNs::FunctionId(f.id),
AssocItem::Const(c) => ValueNs::ConstId(c.id),
AssocItem::TypeAlias(_) => unreachable!(),
let (def, container) = match item {
AssocItemId::FunctionId(f) => {
(ValueNs::FunctionId(f), f.lookup(self.db).container)
}
AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container),
AssocItemId::TypeAliasId(_) => unreachable!(),
};
let substs = match item.container(self.db) {
Container::ImplBlock(_) => self.find_self_types(&def, ty.clone()),
Container::Trait(t) => {
let substs = match container {
ContainerId::ImplId(_) => self.find_self_types(&def, ty.clone()),
ContainerId::TraitId(trait_) => {
// we're picking this method
let trait_substs = Substs::build_for_def(self.db, t)
let trait_substs = Substs::build_for_def(self.db, trait_)
.push(ty.clone())
.fill(std::iter::repeat_with(|| self.new_type_var()))
.build();

@@ -212,37 +218,41 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
.fill_with_params()
.build();
self.obligations.push(super::Obligation::Trait(TraitRef {
trait_: t,
trait_,
substs: trait_substs,
}));
Some(substs)
}
ContainerId::ModuleId(_) => None,
};

self.write_assoc_resolution(id, item);
self.write_assoc_resolution(id, item.into());
Some((def, substs))
},
)
}

fn find_self_types(&self, def: &ValueNs, actual_def_ty: Ty) -> Option<Substs> {
if let ValueNs::FunctionId(func) = def {
let func = Function::from(*func);
if let ValueNs::FunctionId(func) = *def {
// We only do the infer if parent has generic params
let gen = self.db.generic_params(func.id.into());
let gen = self.db.generic_params(func.into());
if gen.count_parent_params() == 0 {
return None;
}

let impl_block = func.impl_block(self.db)?.target_ty(self.db);
let impl_block_substs = impl_block.substs()?;
let impl_id = match func.lookup(self.db).container {
ContainerId::ImplId(it) => it,
_ => return None,
};
let self_ty = self.db.impl_ty(impl_id).self_type().clone();
let self_ty_substs = self_ty.substs()?;
let actual_substs = actual_def_ty.substs()?;

let mut new_substs = vec![Ty::Unknown; gen.count_parent_params()];

// The following code *links up* the function's actual param type
// and impl_block type param index
impl_block_substs.iter().zip(actual_substs.iter()).for_each(|(param, pty)| {
self_ty_substs.iter().zip(actual_substs.iter()).for_each(|(param, pty)| {
if let Ty::Param { idx, .. } = param {
if let Some(s) = new_substs.get_mut(*idx as usize) {
*s = pty.clone();
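A concrete (made-up) instance of the zip above, to make the linking step explicit:

// Worked example, hypothetical types only:
//   impl<T, U> Foo<T, U> { fn make(value: T) -> Foo<T, U> { /* ... */ } }
// self_ty is Foo<T, U>, so self_ty_substs is [Param { idx: 0 }, Param { idx: 1 }].
// If the actual definition type is Foo<u32, String>, actual_substs is
// [u32, String], and the zip writes new_substs[0] = u32 and
// new_substs[1] = String, recovering the impl's parameters from the
// actual self type.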
@@ -1,12 +1,10 @@
//! Unification and canonicalization logic.

use super::{InferenceContext, Obligation};
use crate::db::HirDatabase;
use crate::ty::{
Canonical, InEnvironment, InferTy, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty,
TypeWalk,
use crate::{
db::HirDatabase, utils::make_mut_slice, Canonical, InEnvironment, InferTy, ProjectionPredicate,
ProjectionTy, Substs, TraitRef, Ty, TypeWalk,
};
use crate::util::make_mut_slice;

impl<'a, D: HirDatabase> InferenceContext<'a, D> {
pub(super) fn canonicalizer<'b>(&'b mut self) -> Canonicalizer<'a, 'b, D>
1138
crates/ra_hir_ty/src/lib.rs
Normal file
File diff suppressed because it is too large
@@ -9,43 +9,30 @@ use std::iter;
use std::sync::Arc;

use hir_def::{
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType},
builtin_type::BuiltinType,
generics::WherePredicate,
path::{GenericArg, PathSegment},
path::{GenericArg, Path, PathKind, PathSegment},
resolver::{HasResolver, Resolver, TypeNs},
type_ref::{TypeBound, TypeRef},
AdtId, GenericDefId, LocalStructFieldId, VariantId,
AdtId, AstItemDef, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId,
LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
};
use ra_arena::map::ArenaMap;
use ra_db::CrateId;

use super::{
FnSig, GenericPredicate, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
TypeWalk,
};
use crate::{
db::HirDatabase,
ty::{
primitive::{FloatTy, IntTy, Uncertain},
Adt,
primitive::{FloatTy, IntTy},
utils::{
all_super_traits, associated_type_by_name_including_super_traits, make_mut_slice,
variant_data,
},
util::make_mut_slice,
Const, Enum, EnumVariant, Function, GenericDef, ImplBlock, ModuleDef, Path, Static, Struct,
Trait, TypeAlias, Union,
FnSig, GenericPredicate, ImplTy, ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment,
TraitRef, Ty, TypeCtor, TypeWalk,
};

// FIXME: this is only really used in `type_for_def`, which contains a bunch of
// impossible cases. Perhaps we should recombine `TypeableDef` and `Namespace`
// into a `AsTypeDef`, `AsValueDef` enums?
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Namespace {
Types,
Values,
// Note that only type inference uses this enum, and it doesn't care about macros.
// Macro,
}

impl Ty {
pub(crate) fn from_hir(db: &impl HirDatabase, resolver: &Resolver, type_ref: &TypeRef) -> Self {
pub fn from_hir(db: &impl HirDatabase, resolver: &Resolver, type_ref: &TypeRef) -> Self {
match type_ref {
TypeRef::Never => Ty::simple(TypeCtor::Never),
TypeRef::Tuple(inner) => {

@@ -114,7 +101,7 @@ impl Ty {
TypeRef::Path(path) => path,
_ => return None,
};
if let crate::PathKind::Type(_) = &path.kind {
if let PathKind::Type(_) = &path.kind {
return None;
}
if path.segments.len() > 1 {

@@ -158,19 +145,16 @@ impl Ty {
) -> Ty {
let ty = match resolution {
TypeNs::TraitId(trait_) => {
let trait_ref = TraitRef::from_resolved_path(
db,
resolver,
trait_.into(),
resolved_segment,
None,
);
let trait_ref =
TraitRef::from_resolved_path(db, resolver, trait_, resolved_segment, None);
return if remaining_segments.len() == 1 {
let segment = &remaining_segments[0];
match trait_ref
.trait_
.associated_type_by_name_including_super_traits(db, &segment.name)
{
let associated_ty = associated_type_by_name_including_super_traits(
db,
trait_ref.trait_,
&segment.name,
);
match associated_ty {
Some(associated_ty) => {
// FIXME handle type parameters on the segment
Ty::Projection(ProjectionTy {

@@ -195,8 +179,8 @@ impl Ty {
let name = resolved_segment.name.clone();
Ty::Param { idx, name }
}
TypeNs::SelfType(impl_block) => ImplBlock::from(impl_block).target_ty(db),
TypeNs::AdtSelfType(adt) => Adt::from(adt).ty(db),
TypeNs::SelfType(impl_id) => db.impl_ty(impl_id).self_type().clone(),
TypeNs::AdtSelfType(adt) => db.ty(adt.into()),

TypeNs::AdtId(it) => Ty::from_hir_path_inner(db, resolver, resolved_segment, it.into()),
TypeNs::BuiltinType(it) => {

@@ -214,7 +198,7 @@ impl Ty {

pub(crate) fn from_hir_path(db: &impl HirDatabase, resolver: &Resolver, path: &Path) -> Ty {
// Resolve the path (in type namespace)
if let crate::PathKind::Type(type_ref) = &path.kind {
if let PathKind::Type(type_ref) = &path.kind {
let ty = Ty::from_hir(db, resolver, &type_ref);
let remaining_segments = &path.segments[..];
return Ty::from_type_relative_path(db, resolver, ty, remaining_segments);

@@ -258,9 +242,9 @@ impl Ty {
GenericPredicate::Implemented(tr) if tr.self_ty() == &self_ty => Some(tr.trait_),
_ => None,
});
let traits = traits_from_env.flat_map(|t| t.all_super_traits(db));
let traits = traits_from_env.flat_map(|t| all_super_traits(db, t));
for t in traits {
if let Some(associated_ty) = t.associated_type_by_name(db, &segment.name) {
if let Some(associated_ty) = db.trait_data(t).associated_type_by_name(&segment.name) {
let substs =
Substs::build_for_def(db, t).push(self_ty.clone()).fill_with_unknown().build();
// FIXME handle type parameters on the segment

@@ -274,27 +258,15 @@ impl Ty {
db: &impl HirDatabase,
resolver: &Resolver,
segment: &PathSegment,
typable: TypableDef,
typable: TyDefId,
) -> Ty {
let ty = db.type_for_def(typable, Namespace::Types);
let substs = Ty::substs_from_path_segment(db, resolver, segment, typable);
ty.subst(&substs)
}

pub(super) fn substs_from_path_segment(
db: &impl HirDatabase,
resolver: &Resolver,
segment: &PathSegment,
resolved: TypableDef,
) -> Substs {
let def_generic: Option<GenericDef> = match resolved {
TypableDef::Function(func) => Some(func.into()),
TypableDef::Adt(adt) => Some(adt.into()),
TypableDef::EnumVariant(var) => Some(var.parent_enum(db).into()),
TypableDef::TypeAlias(t) => Some(t.into()),
TypableDef::Const(_) | TypableDef::Static(_) | TypableDef::BuiltinType(_) => None,
let generic_def = match typable {
TyDefId::BuiltinType(_) => None,
TyDefId::AdtId(it) => Some(it.into()),
TyDefId::TypeAliasId(it) => Some(it.into()),
};
substs_from_path_segment(db, resolver, segment, def_generic, false)
let substs = substs_from_path_segment(db, resolver, segment, generic_def, false);
db.ty(typable).subst(&substs)
}

/// Collect generic arguments from a path into a `Substs`. See also

@@ -303,17 +275,18 @@ impl Ty {
db: &impl HirDatabase,
resolver: &Resolver,
path: &Path,
resolved: TypableDef,
// Note that we don't call `db.value_type(resolved)` here,
// `ValueTyDefId` is just a convenient way to pass generics and
// special-case enum variants
resolved: ValueTyDefId,
) -> Substs {
let last = path.segments.last().expect("path should have at least one segment");
let segment = match resolved {
TypableDef::Function(_)
| TypableDef::Adt(_)
| TypableDef::Const(_)
| TypableDef::Static(_)
| TypableDef::TypeAlias(_)
| TypableDef::BuiltinType(_) => last,
TypableDef::EnumVariant(_) => {
let (segment, generic_def) = match resolved {
ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
ValueTyDefId::StructId(it) => (last, Some(it.into())),
ValueTyDefId::ConstId(it) => (last, Some(it.into())),
ValueTyDefId::StaticId(_) => (last, None),
ValueTyDefId::EnumVariantId(var) => {
// the generic args for an enum variant may be either specified
// on the segment referring to the enum, or on the segment
// referring to the variant. So `Option::<T>::None` and

@@ -327,10 +300,10 @@ impl Ty {
// Option::None::<T>
last
};
segment
(segment, Some(var.parent.into()))
}
};
Ty::substs_from_path_segment(db, resolver, segment, resolved)
substs_from_path_segment(db, resolver, segment, generic_def, false)
}
}

@ -338,7 +311,7 @@ pub(super) fn substs_from_path_segment(
|
|||
db: &impl HirDatabase,
|
||||
resolver: &Resolver,
|
||||
segment: &PathSegment,
|
||||
def_generic: Option<GenericDef>,
|
||||
def_generic: Option<GenericDefId>,
|
||||
add_self_param: bool,
|
||||
) -> Substs {
|
||||
let mut substs = Vec::new();
|
||||
|
@ -376,7 +349,7 @@ pub(super) fn substs_from_path_segment(
|
|||
|
||||
// handle defaults
|
||||
if let Some(def_generic) = def_generic {
|
||||
let default_substs = db.generic_defaults(def_generic);
|
||||
let default_substs = db.generic_defaults(def_generic.into());
|
||||
assert_eq!(substs.len(), default_substs.len());
|
||||
|
||||
for (i, default_ty) in default_substs.iter().enumerate() {
|
||||
|
@ -390,7 +363,7 @@ pub(super) fn substs_from_path_segment(
|
|||
}
|
||||
|
||||
impl TraitRef {
|
||||
pub(crate) fn from_path(
|
||||
fn from_path(
|
||||
db: &impl HirDatabase,
|
||||
resolver: &Resolver,
|
||||
path: &Path,
|
||||
|
@ -404,10 +377,10 @@ impl TraitRef {
|
|||
Some(TraitRef::from_resolved_path(db, resolver, resolved.into(), segment, explicit_self_ty))
|
||||
}
|
||||
|
||||
pub(super) fn from_resolved_path(
|
||||
pub(crate) fn from_resolved_path(
|
||||
db: &impl HirDatabase,
|
||||
resolver: &Resolver,
|
||||
resolved: Trait,
|
||||
resolved: TraitId,
|
||||
segment: &PathSegment,
|
||||
explicit_self_ty: Option<Ty>,
|
||||
) -> Self {
|
||||
|
@ -418,7 +391,7 @@ impl TraitRef {
|
|||
TraitRef { trait_: resolved, substs }
|
||||
}
|
||||
|
||||
pub(crate) fn from_hir(
|
||||
fn from_hir(
|
||||
db: &impl HirDatabase,
|
||||
resolver: &Resolver,
|
||||
type_ref: &TypeRef,
|
||||
|
@ -435,18 +408,13 @@ impl TraitRef {
|
|||
db: &impl HirDatabase,
|
||||
resolver: &Resolver,
|
||||
segment: &PathSegment,
|
||||
resolved: Trait,
|
||||
resolved: TraitId,
|
||||
) -> Substs {
|
||||
let has_self_param =
|
||||
segment.args_and_bindings.as_ref().map(|a| a.has_self_type).unwrap_or(false);
|
||||
substs_from_path_segment(db, resolver, segment, Some(resolved.into()), !has_self_param)
|
||||
}
|
||||
|
||||
pub(crate) fn for_trait(db: &impl HirDatabase, trait_: Trait) -> TraitRef {
|
||||
let substs = Substs::identity(&db.generic_params(trait_.id.into()));
|
||||
TraitRef { trait_, substs }
|
||||
}
|
||||
|
||||
pub(crate) fn from_type_bound(
|
||||
db: &impl HirDatabase,
|
||||
resolver: &Resolver,
|
||||
|
@ -502,10 +470,11 @@ fn assoc_type_bindings_from_type_bound<'a>(
|
|||
.flat_map(|args_and_bindings| args_and_bindings.bindings.iter())
|
||||
.map(move |(name, type_ref)| {
|
||||
let associated_ty =
|
||||
match trait_ref.trait_.associated_type_by_name_including_super_traits(db, &name) {
|
||||
None => return GenericPredicate::Error,
|
||||
Some(t) => t,
|
||||
};
|
||||
associated_type_by_name_including_super_traits(db, trait_ref.trait_, &name);
|
||||
let associated_ty = match associated_ty {
|
||||
None => return GenericPredicate::Error,
|
||||
Some(t) => t,
|
||||
};
|
||||
let projection_ty =
|
||||
ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() };
|
||||
let ty = Ty::from_hir(db, resolver, type_ref);
|
||||
|
@ -514,39 +483,12 @@ fn assoc_type_bindings_from_type_bound<'a>(
|
|||
})
|
||||
}
|
||||
|
||||
/// Build the declared type of an item. This depends on the namespace; e.g. for
|
||||
/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
|
||||
/// the constructor function `(usize) -> Foo` which lives in the values
|
||||
/// namespace.
|
||||
pub(crate) fn type_for_def(db: &impl HirDatabase, def: TypableDef, ns: Namespace) -> Ty {
|
||||
match (def, ns) {
|
||||
(TypableDef::Function(f), Namespace::Values) => type_for_fn(db, f),
|
||||
(TypableDef::Adt(Adt::Struct(s)), Namespace::Values) => type_for_struct_constructor(db, s),
|
||||
(TypableDef::Adt(adt), Namespace::Types) => type_for_adt(db, adt),
|
||||
(TypableDef::EnumVariant(v), Namespace::Values) => type_for_enum_variant_constructor(db, v),
|
||||
(TypableDef::TypeAlias(t), Namespace::Types) => type_for_type_alias(db, t),
|
||||
(TypableDef::Const(c), Namespace::Values) => type_for_const(db, c),
|
||||
(TypableDef::Static(c), Namespace::Values) => type_for_static(db, c),
|
||||
(TypableDef::BuiltinType(t), Namespace::Types) => type_for_builtin(t),
|
||||
|
||||
// 'error' cases:
|
||||
(TypableDef::Function(_), Namespace::Types) => Ty::Unknown,
|
||||
(TypableDef::Adt(Adt::Union(_)), Namespace::Values) => Ty::Unknown,
|
||||
(TypableDef::Adt(Adt::Enum(_)), Namespace::Values) => Ty::Unknown,
|
||||
(TypableDef::EnumVariant(_), Namespace::Types) => Ty::Unknown,
|
||||
(TypableDef::TypeAlias(_), Namespace::Values) => Ty::Unknown,
|
||||
(TypableDef::Const(_), Namespace::Types) => Ty::Unknown,
|
||||
(TypableDef::Static(_), Namespace::Types) => Ty::Unknown,
|
||||
(TypableDef::BuiltinType(_), Namespace::Values) => Ty::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
/// Build the signature of a callable item (function, struct or enum variant).
|
||||
pub(crate) fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> FnSig {
|
||||
pub fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> FnSig {
|
||||
match def {
|
||||
CallableDef::Function(f) => fn_sig_for_fn(db, f),
|
||||
CallableDef::Struct(s) => fn_sig_for_struct_constructor(db, s),
|
||||
CallableDef::EnumVariant(e) => fn_sig_for_enum_variant_constructor(db, e),
|
||||
CallableDef::FunctionId(f) => fn_sig_for_fn(db, f),
|
||||
CallableDef::StructId(s) => fn_sig_for_struct_constructor(db, s),
|
||||
CallableDef::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -555,12 +497,11 @@ pub(crate) fn field_types_query(
db: &impl HirDatabase,
variant_id: VariantId,
) -> Arc<ArenaMap<LocalStructFieldId, Ty>> {
let (resolver, var_data) = match variant_id {
VariantId::StructId(it) => (it.resolver(db), db.struct_data(it.0).variant_data.clone()),
VariantId::EnumVariantId(it) => (
it.parent.resolver(db),
db.enum_data(it.parent).variants[it.local_id].variant_data.clone(),
),
let var_data = variant_data(db, variant_id);
let resolver = match variant_id {
VariantId::StructId(it) => it.resolver(db),
VariantId::UnionId(it) => it.resolver(db),
VariantId::EnumVariantId(it) => it.parent.resolver(db),
};
let mut res = ArenaMap::default();
for (field_id, field_data) in var_data.fields().iter() {
@@ -579,10 +520,10 @@ pub(crate) fn field_types_query(
/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
pub(crate) fn generic_predicates_for_param_query(
db: &impl HirDatabase,
def: GenericDef,
def: GenericDefId,
param_idx: u32,
) -> Arc<[GenericPredicate]> {
let resolver = GenericDefId::from(def).resolver(db);
let resolver = def.resolver(db);
resolver
.where_predicates_in_scope()
// we have to filter out all other predicates *first*, before attempting to lower them
@@ -591,24 +532,23 @@ pub(crate) fn generic_predicates_for_param_query(
.collect()
}

pub(crate) fn trait_env(
db: &impl HirDatabase,
resolver: &Resolver,
) -> Arc<super::TraitEnvironment> {
let predicates = resolver
.where_predicates_in_scope()
.flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred))
.collect::<Vec<_>>();
impl TraitEnvironment {
pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
let predicates = resolver
.where_predicates_in_scope()
.flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred))
.collect::<Vec<_>>();

Arc::new(super::TraitEnvironment { predicates })
Arc::new(TraitEnvironment { predicates })
}
}

/// Resolve the where clause(s) of an item with generics.
pub(crate) fn generic_predicates_query(
db: &impl HirDatabase,
def: GenericDef,
def: GenericDefId,
) -> Arc<[GenericPredicate]> {
let resolver = GenericDefId::from(def).resolver(db);
let resolver = def.resolver(db);
resolver
.where_predicates_in_scope()
.flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred))
@@ -616,8 +556,8 @@ pub(crate) fn generic_predicates_query(
}

/// Resolve the default type params from generics
pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDef) -> Substs {
let resolver = GenericDefId::from(def).resolver(db);
pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs {
let resolver = def.resolver(db);
let generic_params = db.generic_params(def.into());

let defaults = generic_params
@@ -629,9 +569,9 @@ pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDef) ->
Substs(defaults)
}
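As a reminder of what the defaults being collected here look like in source, a minimal sketch in plain Rust (names are illustrative only, not part of the change):

    struct WithDefault<T = u32> {
        value: T,
    }

    fn defaults_demo() {
        // Omitting the parameter falls back to the declared default `u32`;
        // a per-parameter default like this is what `generic_defaults_query` records.
        let a: WithDefault = WithDefault { value: 1 };
        let b: WithDefault<String> = WithDefault { value: String::new() };
        let _ = (a.value, b.value.len());
    }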

fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig {
let data = db.function_data(def.id);
let resolver = def.id.resolver(db);
fn fn_sig_for_fn(db: &impl HirDatabase, def: FunctionId) -> FnSig {
let data = db.function_data(def);
let resolver = def.resolver(db);
let params = data.params.iter().map(|tr| Ty::from_hir(db, &resolver, tr)).collect::<Vec<_>>();
let ret = Ty::from_hir(db, &resolver, &data.ret_type);
FnSig::from_params_and_return(params, ret)
@@ -639,24 +579,24 @@ fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig {

/// Build the declared type of a function. This should not need to look at the
/// function body.
fn type_for_fn(db: &impl HirDatabase, def: Function) -> Ty {
let generics = db.generic_params(def.id.into());
fn type_for_fn(db: &impl HirDatabase, def: FunctionId) -> Ty {
let generics = db.generic_params(def.into());
let substs = Substs::identity(&generics);
Ty::apply(TypeCtor::FnDef(def.into()), substs)
}

/// Build the declared type of a const.
fn type_for_const(db: &impl HirDatabase, def: Const) -> Ty {
let data = db.const_data(def.id);
let resolver = def.id.resolver(db);
fn type_for_const(db: &impl HirDatabase, def: ConstId) -> Ty {
let data = db.const_data(def);
let resolver = def.resolver(db);

Ty::from_hir(db, &resolver, &data.type_ref)
}

/// Build the declared type of a static.
fn type_for_static(db: &impl HirDatabase, def: Static) -> Ty {
let data = db.static_data(def.id);
let resolver = def.id.resolver(db);
fn type_for_static(db: &impl HirDatabase, def: StaticId) -> Ty {
let data = db.static_data(def);
let resolver = def.resolver(db);

Ty::from_hir(db, &resolver, &data.type_ref)
}
@ -672,160 +612,148 @@ fn type_for_builtin(def: BuiltinType) -> Ty {
|
|||
})
|
||||
}
|
||||
|
||||
impl From<BuiltinInt> for IntTy {
|
||||
fn from(t: BuiltinInt) -> Self {
|
||||
IntTy { signedness: t.signedness, bitness: t.bitness }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BuiltinFloat> for FloatTy {
|
||||
fn from(t: BuiltinFloat) -> Self {
|
||||
FloatTy { bitness: t.bitness }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Option<BuiltinInt>> for Uncertain<IntTy> {
|
||||
fn from(t: Option<BuiltinInt>) -> Self {
|
||||
match t {
|
||||
None => Uncertain::Unknown,
|
||||
Some(t) => Uncertain::Known(t.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Option<BuiltinFloat>> for Uncertain<FloatTy> {
|
||||
fn from(t: Option<BuiltinFloat>) -> Self {
|
||||
match t {
|
||||
None => Uncertain::Unknown,
|
||||
Some(t) => Uncertain::Known(t.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig {
|
||||
let struct_data = db.struct_data(def.id.into());
|
||||
fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> FnSig {
|
||||
let struct_data = db.struct_data(def.into());
|
||||
let fields = struct_data.variant_data.fields();
|
||||
let resolver = def.id.resolver(db);
|
||||
let resolver = def.resolver(db);
|
||||
let params = fields
|
||||
.iter()
|
||||
.map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref))
|
||||
.collect::<Vec<_>>();
|
||||
let ret = type_for_adt(db, def);
|
||||
let ret = type_for_adt(db, def.into());
|
||||
FnSig::from_params_and_return(params, ret)
|
||||
}
|
||||
|
||||
/// Build the type of a tuple struct constructor.
|
||||
fn type_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> Ty {
|
||||
let struct_data = db.struct_data(def.id.into());
|
||||
fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Ty {
|
||||
let struct_data = db.struct_data(def.into());
|
||||
if struct_data.variant_data.is_unit() {
|
||||
return type_for_adt(db, def); // Unit struct
|
||||
return type_for_adt(db, def.into()); // Unit struct
|
||||
}
|
||||
let generics = db.generic_params(def.id.into());
|
||||
let generics = db.generic_params(def.into());
|
||||
let substs = Substs::identity(&generics);
|
||||
Ty::apply(TypeCtor::FnDef(def.into()), substs)
|
||||
}
|
||||
|
||||
fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) -> FnSig {
|
||||
let var_data = def.variant_data(db);
|
||||
let fields = var_data.fields();
|
||||
let resolver = def.parent.id.resolver(db);
|
||||
fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> FnSig {
|
||||
let enum_data = db.enum_data(def.parent);
|
||||
let var_data = &enum_data.variants[def.local_id];
|
||||
let fields = var_data.variant_data.fields();
|
||||
let resolver = def.parent.resolver(db);
|
||||
let params = fields
|
||||
.iter()
|
||||
.map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref))
|
||||
.collect::<Vec<_>>();
|
||||
let generics = db.generic_params(def.parent_enum(db).id.into());
|
||||
let generics = db.generic_params(def.parent.into());
|
||||
let substs = Substs::identity(&generics);
|
||||
let ret = type_for_adt(db, def.parent_enum(db)).subst(&substs);
|
||||
let ret = type_for_adt(db, def.parent.into()).subst(&substs);
|
||||
FnSig::from_params_and_return(params, ret)
|
||||
}
|
||||
|
||||
/// Build the type of a tuple enum variant constructor.
|
||||
fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) -> Ty {
|
||||
let var_data = def.variant_data(db);
|
||||
fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> Ty {
|
||||
let enum_data = db.enum_data(def.parent);
|
||||
let var_data = &enum_data.variants[def.local_id].variant_data;
|
||||
if var_data.is_unit() {
|
||||
return type_for_adt(db, def.parent_enum(db)); // Unit variant
|
||||
return type_for_adt(db, def.parent.into()); // Unit variant
|
||||
}
|
||||
let generics = db.generic_params(def.parent_enum(db).id.into());
|
||||
let generics = db.generic_params(def.parent.into());
|
||||
let substs = Substs::identity(&generics);
|
||||
Ty::apply(TypeCtor::FnDef(def.into()), substs)
|
||||
Ty::apply(TypeCtor::FnDef(EnumVariantId::from(def).into()), substs)
|
||||
}
|
||||
|
||||
fn type_for_adt(db: &impl HirDatabase, adt: impl Into<Adt>) -> Ty {
|
||||
let adt = adt.into();
|
||||
let adt_id: AdtId = adt.into();
|
||||
let generics = db.generic_params(adt_id.into());
|
||||
fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Ty {
|
||||
let generics = db.generic_params(adt.into());
|
||||
Ty::apply(TypeCtor::Adt(adt), Substs::identity(&generics))
|
||||
}
|
||||
|
||||
fn type_for_type_alias(db: &impl HirDatabase, t: TypeAlias) -> Ty {
|
||||
let generics = db.generic_params(t.id.into());
|
||||
let resolver = t.id.resolver(db);
|
||||
let type_ref = t.type_ref(db);
|
||||
fn type_for_type_alias(db: &impl HirDatabase, t: TypeAliasId) -> Ty {
|
||||
let generics = db.generic_params(t.into());
|
||||
let resolver = t.resolver(db);
|
||||
let type_ref = &db.type_alias_data(t).type_ref;
|
||||
let substs = Substs::identity(&generics);
|
||||
let inner = Ty::from_hir(db, &resolver, &type_ref.unwrap_or(TypeRef::Error));
|
||||
let inner = Ty::from_hir(db, &resolver, type_ref.as_ref().unwrap_or(&TypeRef::Error));
|
||||
inner.subst(&substs)
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub enum TypableDef {
|
||||
Function(Function),
|
||||
Adt(Adt),
|
||||
EnumVariant(EnumVariant),
|
||||
TypeAlias(TypeAlias),
|
||||
Const(Const),
|
||||
Static(Static),
|
||||
BuiltinType(BuiltinType),
|
||||
}
|
||||
impl_froms!(
|
||||
TypableDef: Function,
|
||||
Adt(Struct, Enum, Union),
|
||||
EnumVariant,
|
||||
TypeAlias,
|
||||
Const,
|
||||
Static,
|
||||
BuiltinType
|
||||
);
|
||||
|
||||
impl From<ModuleDef> for Option<TypableDef> {
|
||||
fn from(def: ModuleDef) -> Option<TypableDef> {
|
||||
let res = match def {
|
||||
ModuleDef::Function(f) => f.into(),
|
||||
ModuleDef::Adt(adt) => adt.into(),
|
||||
ModuleDef::EnumVariant(v) => v.into(),
|
||||
ModuleDef::TypeAlias(t) => t.into(),
|
||||
ModuleDef::Const(v) => v.into(),
|
||||
ModuleDef::Static(v) => v.into(),
|
||||
ModuleDef::BuiltinType(t) => t.into(),
|
||||
ModuleDef::Module(_) | ModuleDef::Trait(_) => return None,
|
||||
};
|
||||
Some(res)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub enum CallableDef {
|
||||
Function(Function),
|
||||
Struct(Struct),
|
||||
EnumVariant(EnumVariant),
|
||||
FunctionId(FunctionId),
|
||||
StructId(StructId),
|
||||
EnumVariantId(EnumVariantId),
|
||||
}
|
||||
impl_froms!(CallableDef: Function, Struct, EnumVariant);
|
||||
impl_froms!(CallableDef: FunctionId, StructId, EnumVariantId);
|
||||
|
||||
impl CallableDef {
|
||||
pub fn krate(self, db: &impl HirDatabase) -> Option<crate::Crate> {
|
||||
pub fn krate(self, db: &impl HirDatabase) -> CrateId {
|
||||
match self {
|
||||
CallableDef::Function(f) => f.krate(db),
|
||||
CallableDef::Struct(s) => s.krate(db),
|
||||
CallableDef::EnumVariant(e) => e.parent_enum(db).krate(db),
|
||||
CallableDef::FunctionId(f) => f.lookup(db).module(db).krate,
|
||||
CallableDef::StructId(s) => s.module(db).krate,
|
||||
CallableDef::EnumVariantId(e) => e.parent.module(db).krate,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CallableDef> for GenericDef {
|
||||
fn from(def: CallableDef) -> GenericDef {
|
||||
impl From<CallableDef> for GenericDefId {
|
||||
fn from(def: CallableDef) -> GenericDefId {
|
||||
match def {
|
||||
CallableDef::Function(f) => f.into(),
|
||||
CallableDef::Struct(s) => s.into(),
|
||||
CallableDef::EnumVariant(e) => e.into(),
|
||||
CallableDef::FunctionId(f) => f.into(),
|
||||
CallableDef::StructId(s) => s.into(),
|
||||
CallableDef::EnumVariantId(e) => e.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum TyDefId {
|
||||
BuiltinType(BuiltinType),
|
||||
AdtId(AdtId),
|
||||
TypeAliasId(TypeAliasId),
|
||||
}
|
||||
impl_froms!(TyDefId: BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum ValueTyDefId {
|
||||
FunctionId(FunctionId),
|
||||
StructId(StructId),
|
||||
EnumVariantId(EnumVariantId),
|
||||
ConstId(ConstId),
|
||||
StaticId(StaticId),
|
||||
}
|
||||
impl_froms!(ValueTyDefId: FunctionId, StructId, EnumVariantId, ConstId, StaticId);

/// Build the declared type of an item. This depends on the namespace; e.g. for
/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
/// the constructor function `(usize) -> Foo` which lives in the values
/// namespace.
pub(crate) fn ty_query(db: &impl HirDatabase, def: TyDefId) -> Ty {
match def {
TyDefId::BuiltinType(it) => type_for_builtin(it),
TyDefId::AdtId(it) => type_for_adt(db, it),
TyDefId::TypeAliasId(it) => type_for_type_alias(db, it),
}
}
pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Ty {
match def {
ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
ValueTyDefId::ConstId(it) => type_for_const(db, it),
ValueTyDefId::StaticId(it) => type_for_static(db, it),
}
}

pub(crate) fn impl_ty_query(db: &impl HirDatabase, impl_id: ImplId) -> ImplTy {
let impl_data = db.impl_data(impl_id);
let resolver = impl_id.resolver(db);
let self_ty = Ty::from_hir(db, &resolver, &impl_data.target_type);
match impl_data.target_trait.as_ref() {
Some(trait_ref) => {
match TraitRef::from_hir(db, &resolver, trait_ref, Some(self_ty.clone())) {
Some(it) => ImplTy::TraitRef(it),
None => ImplTy::Inherent(self_ty),
}
}
None => ImplTy::Inherent(self_ty),
}
}
@ -5,18 +5,23 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use arrayvec::ArrayVec;
|
||||
use hir_def::{lang_item::LangItemTarget, resolver::Resolver, AstItemDef};
|
||||
use hir_def::{
|
||||
lang_item::LangItemTarget, resolver::Resolver, type_ref::Mutability, AssocItemId, AstItemDef,
|
||||
FunctionId, HasModule, ImplId, TraitId,
|
||||
};
|
||||
use hir_expand::name::Name;
|
||||
use ra_db::CrateId;
|
||||
use ra_prof::profile;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::{
|
||||
autoderef,
|
||||
db::HirDatabase,
|
||||
ty::primitive::{FloatBitness, Uncertain},
|
||||
ty::{Ty, TypeCtor},
|
||||
AssocItem, Crate, Function, ImplBlock, Module, Mutability, Name, Trait,
|
||||
primitive::{FloatBitness, Uncertain},
|
||||
utils::all_super_traits,
|
||||
Canonical, ImplTy, InEnvironment, TraitEnvironment, TraitRef, Ty, TypeCtor,
|
||||
};
|
||||
|
||||
use super::{autoderef, lower, Canonical, InEnvironment, TraitEnvironment, TraitRef};
|
||||
|
||||
/// This is used as a key for indexing impls.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum TyFingerprint {
|
||||
|
@ -37,99 +42,101 @@ impl TyFingerprint {
|
|||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct CrateImplBlocks {
|
||||
impls: FxHashMap<TyFingerprint, Vec<ImplBlock>>,
|
||||
impls_by_trait: FxHashMap<Trait, Vec<ImplBlock>>,
|
||||
impls: FxHashMap<TyFingerprint, Vec<ImplId>>,
|
||||
impls_by_trait: FxHashMap<TraitId, Vec<ImplId>>,
|
||||
}
|
||||
|
||||
impl CrateImplBlocks {
|
||||
pub(crate) fn impls_in_crate_query(
|
||||
db: &impl HirDatabase,
|
||||
krate: Crate,
|
||||
krate: CrateId,
|
||||
) -> Arc<CrateImplBlocks> {
|
||||
let mut crate_impl_blocks =
|
||||
let _p = profile("impls_in_crate_query");
|
||||
let mut res =
|
||||
CrateImplBlocks { impls: FxHashMap::default(), impls_by_trait: FxHashMap::default() };
|
||||
if let Some(module) = krate.root_module(db) {
|
||||
crate_impl_blocks.collect_recursive(db, module);
|
||||
}
|
||||
Arc::new(crate_impl_blocks)
|
||||
}
|
||||
pub fn lookup_impl_blocks(&self, ty: &Ty) -> impl Iterator<Item = ImplBlock> + '_ {
|
||||
let fingerprint = TyFingerprint::for_impl(ty);
|
||||
fingerprint.and_then(|f| self.impls.get(&f)).into_iter().flatten().copied()
|
||||
}
|
||||
|
||||
pub fn lookup_impl_blocks_for_trait(&self, tr: Trait) -> impl Iterator<Item = ImplBlock> + '_ {
|
||||
self.impls_by_trait.get(&tr).into_iter().flatten().copied()
|
||||
}
|
||||
|
||||
pub fn all_impls<'a>(&'a self) -> impl Iterator<Item = ImplBlock> + 'a {
|
||||
self.impls.values().chain(self.impls_by_trait.values()).flatten().copied()
|
||||
}
|
||||
|
||||
fn collect_recursive(&mut self, db: &impl HirDatabase, module: Module) {
|
||||
for impl_block in module.impl_blocks(db) {
|
||||
let target_ty = impl_block.target_ty(db);
|
||||
|
||||
if impl_block.target_trait(db).is_some() {
|
||||
if let Some(tr) = impl_block.target_trait_ref(db) {
|
||||
self.impls_by_trait.entry(tr.trait_).or_default().push(impl_block);
|
||||
}
|
||||
} else {
|
||||
if let Some(target_ty_fp) = TyFingerprint::for_impl(&target_ty) {
|
||||
self.impls.entry(target_ty_fp).or_default().push(impl_block);
|
||||
let crate_def_map = db.crate_def_map(krate);
|
||||
for (_module_id, module_data) in crate_def_map.modules.iter() {
|
||||
for &impl_id in module_data.impls.iter() {
|
||||
match db.impl_ty(impl_id) {
|
||||
ImplTy::TraitRef(tr) => {
|
||||
res.impls_by_trait.entry(tr.trait_).or_default().push(impl_id);
|
||||
}
|
||||
ImplTy::Inherent(self_ty) => {
|
||||
if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty) {
|
||||
res.impls.entry(self_ty_fp).or_default().push(impl_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for child in module.children(db) {
|
||||
self.collect_recursive(db, child);
|
||||
}
|
||||
Arc::new(res)
|
||||
}
|
||||
pub fn lookup_impl_blocks(&self, ty: &Ty) -> impl Iterator<Item = ImplId> + '_ {
|
||||
let fingerprint = TyFingerprint::for_impl(ty);
|
||||
fingerprint.and_then(|f| self.impls.get(&f)).into_iter().flatten().copied()
|
||||
}
|
||||
|
||||
pub fn lookup_impl_blocks_for_trait(&self, tr: TraitId) -> impl Iterator<Item = ImplId> + '_ {
|
||||
self.impls_by_trait.get(&tr).into_iter().flatten().copied()
|
||||
}
|
||||
|
||||
pub fn all_impls<'a>(&'a self) -> impl Iterator<Item = ImplId> + 'a {
|
||||
self.impls.values().chain(self.impls_by_trait.values()).flatten().copied()
|
||||
}
|
||||
}
|
||||
|
||||
fn def_crates(db: &impl HirDatabase, cur_crate: Crate, ty: &Ty) -> Option<ArrayVec<[Crate; 2]>> {
|
||||
// Types like slice can have inherent impls in several crates, (core and alloc).
|
||||
// The corresponding impls are marked with lang items, so we can use them to find the required crates.
|
||||
macro_rules! lang_item_crate {
|
||||
impl Ty {
|
||||
pub fn def_crates(
|
||||
&self,
|
||||
db: &impl HirDatabase,
|
||||
cur_crate: CrateId,
|
||||
) -> Option<ArrayVec<[CrateId; 2]>> {
|
||||
// Types like slice can have inherent impls in several crates, (core and alloc).
|
||||
// The corresponding impls are marked with lang items, so we can use them to find the required crates.
|
||||
macro_rules! lang_item_crate {
|
||||
($($name:expr),+ $(,)?) => {{
|
||||
let mut v = ArrayVec::<[LangItemTarget; 2]>::new();
|
||||
$(
|
||||
v.extend(db.lang_item(cur_crate.crate_id, $name.into()));
|
||||
v.extend(db.lang_item(cur_crate, $name.into()));
|
||||
)+
|
||||
v
|
||||
}};
|
||||
}
|
||||
|
||||
let lang_item_targets = match ty {
|
||||
Ty::Apply(a_ty) => match a_ty.ctor {
|
||||
TypeCtor::Adt(def_id) => return Some(std::iter::once(def_id.krate(db)?).collect()),
|
||||
TypeCtor::Bool => lang_item_crate!("bool"),
|
||||
TypeCtor::Char => lang_item_crate!("char"),
|
||||
TypeCtor::Float(Uncertain::Known(f)) => match f.bitness {
|
||||
// There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime)
|
||||
FloatBitness::X32 => lang_item_crate!("f32", "f32_runtime"),
|
||||
FloatBitness::X64 => lang_item_crate!("f64", "f64_runtime"),
|
||||
let lang_item_targets = match self {
|
||||
Ty::Apply(a_ty) => match a_ty.ctor {
|
||||
TypeCtor::Adt(def_id) => {
|
||||
return Some(std::iter::once(def_id.module(db).krate).collect())
|
||||
}
|
||||
TypeCtor::Bool => lang_item_crate!("bool"),
|
||||
TypeCtor::Char => lang_item_crate!("char"),
|
||||
TypeCtor::Float(Uncertain::Known(f)) => match f.bitness {
|
||||
// There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime)
|
||||
FloatBitness::X32 => lang_item_crate!("f32", "f32_runtime"),
|
||||
FloatBitness::X64 => lang_item_crate!("f64", "f64_runtime"),
|
||||
},
|
||||
TypeCtor::Int(Uncertain::Known(i)) => lang_item_crate!(i.ty_to_string()),
|
||||
TypeCtor::Str => lang_item_crate!("str_alloc", "str"),
|
||||
TypeCtor::Slice => lang_item_crate!("slice_alloc", "slice"),
|
||||
TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!("const_ptr"),
|
||||
TypeCtor::RawPtr(Mutability::Mut) => lang_item_crate!("mut_ptr"),
|
||||
_ => return None,
|
||||
},
|
||||
TypeCtor::Int(Uncertain::Known(i)) => lang_item_crate!(i.ty_to_string()),
|
||||
TypeCtor::Str => lang_item_crate!("str_alloc", "str"),
|
||||
TypeCtor::Slice => lang_item_crate!("slice_alloc", "slice"),
|
||||
TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!("const_ptr"),
|
||||
TypeCtor::RawPtr(Mutability::Mut) => lang_item_crate!("mut_ptr"),
|
||||
_ => return None,
|
||||
},
|
||||
_ => return None,
|
||||
};
|
||||
let res = lang_item_targets
|
||||
.into_iter()
|
||||
.filter_map(|it| match it {
|
||||
LangItemTarget::ImplBlockId(it) => Some(it),
|
||||
_ => None,
|
||||
})
|
||||
.map(|it| it.module(db).krate.into())
|
||||
.collect();
|
||||
Some(res)
|
||||
};
|
||||
let res = lang_item_targets
|
||||
.into_iter()
|
||||
.filter_map(|it| match it {
|
||||
LangItemTarget::ImplBlockId(it) => Some(it),
|
||||
_ => None,
|
||||
})
|
||||
.map(|it| it.module(db).krate)
.collect();
Some(res)
}
}
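To illustrate the comment above about types whose inherent impls live in several crates, a plain-Rust example (unrelated to the query machinery itself): slice methods come from both `core` and `alloc`, so both defining crates have to be searched.

    fn slice_demo(xs: &[u32]) -> Vec<u32> {
        // `<[T]>::len` is an inherent slice method defined in `core`,
        // while `<[T]>::to_vec` comes from the slice impl in `alloc`;
        // the lang-item lookup above is what finds both crates.
        let _n = xs.len();
        xs.to_vec()
    }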

/// Look up the method with the given name, returning the actual autoderefed
/// receiver type (but without autoref applied yet).
pub(crate) fn lookup_method(
@ -137,10 +144,10 @@ pub(crate) fn lookup_method(
|
|||
db: &impl HirDatabase,
|
||||
name: &Name,
|
||||
resolver: &Resolver,
|
||||
) -> Option<(Ty, Function)> {
|
||||
) -> Option<(Ty, FunctionId)> {
|
||||
iterate_method_candidates(ty, db, resolver, Some(name), LookupMode::MethodCall, |ty, f| match f
|
||||
{
|
||||
AssocItem::Function(f) => Some((ty.clone(), f)),
|
||||
AssocItemId::FunctionId(f) => Some((ty.clone(), f)),
|
||||
_ => None,
|
||||
})
|
||||
}
|
||||
|
@ -160,13 +167,13 @@ pub enum LookupMode {
|
|||
// This would be nicer if it just returned an iterator, but that runs into
|
||||
// lifetime problems, because we need to borrow temp `CrateImplBlocks`.
|
||||
// FIXME add a context type here?
|
||||
pub(crate) fn iterate_method_candidates<T>(
|
||||
pub fn iterate_method_candidates<T>(
|
||||
ty: &Canonical<Ty>,
|
||||
db: &impl HirDatabase,
|
||||
resolver: &Resolver,
|
||||
name: Option<&Name>,
|
||||
mode: LookupMode,
|
||||
mut callback: impl FnMut(&Ty, AssocItem) -> Option<T>,
|
||||
mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
|
||||
) -> Option<T> {
|
||||
let krate = resolver.krate()?;
|
||||
match mode {
|
||||
|
@ -179,16 +186,12 @@ pub(crate) fn iterate_method_candidates<T>(
|
|||
// Also note that when we've got a receiver like &S, even if the method we
// find in the end takes &self, we still do the autoderef step (just as
// rustc does an autoderef and then autoref again).
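// For example (hypothetical): with `struct S; impl S { fn f(&self) {} }` and a
// receiver expression of type `&&S`, the loop below walks the deref chain
// `&&S -> &S -> S` and reports `S` as the receiver type; the `&self` autoref
// is applied later, mirroring rustc's autoderef-then-autoref order.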
for derefed_ty in autoderef::autoderef(db, resolver, ty.clone()) {
|
||||
if let Some(result) = iterate_inherent_methods(
|
||||
&derefed_ty,
|
||||
db,
|
||||
name,
|
||||
mode,
|
||||
krate.into(),
|
||||
&mut callback,
|
||||
) {
|
||||
let environment = TraitEnvironment::lower(db, resolver);
|
||||
let ty = InEnvironment { value: ty.clone(), environment };
|
||||
for derefed_ty in autoderef::autoderef(db, resolver.krate(), ty) {
|
||||
if let Some(result) =
|
||||
iterate_inherent_methods(&derefed_ty, db, name, mode, krate, &mut callback)
|
||||
{
|
||||
return Some(result);
|
||||
}
|
||||
if let Some(result) = iterate_trait_method_candidates(
|
||||
|
@ -226,30 +229,29 @@ fn iterate_trait_method_candidates<T>(
|
|||
resolver: &Resolver,
|
||||
name: Option<&Name>,
|
||||
mode: LookupMode,
|
||||
mut callback: impl FnMut(&Ty, AssocItem) -> Option<T>,
|
||||
mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
|
||||
) -> Option<T> {
|
||||
let krate = resolver.krate()?;
|
||||
// FIXME: maybe put the trait_env behind a query (need to figure out good input parameters for that)
|
||||
let env = lower::trait_env(db, resolver);
|
||||
let env = TraitEnvironment::lower(db, resolver);
|
||||
// if ty is `impl Trait` or `dyn Trait`, the trait doesn't need to be in scope
|
||||
let inherent_trait = ty.value.inherent_trait().into_iter();
|
||||
// if we have `T: Trait` in the param env, the trait doesn't need to be in scope
|
||||
let traits_from_env = env
|
||||
.trait_predicates_for_self_ty(&ty.value)
|
||||
.map(|tr| tr.trait_)
|
||||
.flat_map(|t| t.all_super_traits(db));
|
||||
let traits = inherent_trait
|
||||
.chain(traits_from_env)
|
||||
.chain(resolver.traits_in_scope(db).into_iter().map(Trait::from));
|
||||
.flat_map(|t| all_super_traits(db, t));
|
||||
let traits =
|
||||
inherent_trait.chain(traits_from_env).chain(resolver.traits_in_scope(db).into_iter());
|
||||
'traits: for t in traits {
|
||||
let data = db.trait_data(t.id);
|
||||
let data = db.trait_data(t);
|
||||
|
||||
// we'll be lazy about checking whether the type implements the
|
||||
// trait, but if we find out it doesn't, we'll skip the rest of the
|
||||
// iteration
|
||||
let mut known_implemented = false;
|
||||
for &item in data.items.iter() {
|
||||
if !is_valid_candidate(db, name, mode, item.into()) {
|
||||
for (_name, item) in data.items.iter() {
|
||||
if !is_valid_candidate(db, name, mode, (*item).into()) {
|
||||
continue;
|
||||
}
|
||||
if !known_implemented {
|
||||
|
@ -259,7 +261,7 @@ fn iterate_trait_method_candidates<T>(
|
|||
}
|
||||
}
|
||||
known_implemented = true;
|
||||
if let Some(result) = callback(&ty.value, item.into()) {
|
||||
if let Some(result) = callback(&ty.value, (*item).into()) {
|
||||
return Some(result);
|
||||
}
|
||||
}
|
||||
|
@ -272,18 +274,18 @@ fn iterate_inherent_methods<T>(
|
|||
db: &impl HirDatabase,
|
||||
name: Option<&Name>,
|
||||
mode: LookupMode,
|
||||
krate: Crate,
|
||||
mut callback: impl FnMut(&Ty, AssocItem) -> Option<T>,
|
||||
krate: CrateId,
|
||||
mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
|
||||
) -> Option<T> {
|
||||
for krate in def_crates(db, krate, &ty.value)? {
|
||||
for krate in ty.value.def_crates(db, krate)? {
|
||||
let impls = db.impls_in_crate(krate);
|
||||
|
||||
for impl_block in impls.lookup_impl_blocks(&ty.value) {
|
||||
for item in impl_block.items(db) {
|
||||
for &item in db.impl_data(impl_block).items.iter() {
|
||||
if !is_valid_candidate(db, name, mode, item) {
|
||||
continue;
|
||||
}
|
||||
if let Some(result) = callback(&ty.value, item) {
|
||||
if let Some(result) = callback(&ty.value, item.into()) {
|
||||
return Some(result);
|
||||
}
|
||||
}
|
||||
|
@ -296,71 +298,47 @@ fn is_valid_candidate(
|
|||
db: &impl HirDatabase,
|
||||
name: Option<&Name>,
|
||||
mode: LookupMode,
|
||||
item: AssocItem,
|
||||
item: AssocItemId,
|
||||
) -> bool {
|
||||
match item {
|
||||
AssocItem::Function(m) => {
|
||||
let data = db.function_data(m.id);
|
||||
name.map_or(true, |name| data.name == *name)
|
||||
AssocItemId::FunctionId(m) => {
|
||||
let data = db.function_data(m);
|
||||
name.map_or(true, |name| &data.name == name)
|
||||
&& (data.has_self_param || mode == LookupMode::Path)
|
||||
}
|
||||
AssocItem::Const(c) => {
|
||||
name.map_or(true, |name| Some(name) == c.name(db).as_ref())
|
||||
&& (mode == LookupMode::Path)
|
||||
AssocItemId::ConstId(c) => {
|
||||
let data = db.const_data(c);
|
||||
name.map_or(true, |name| data.name.as_ref() == Some(name)) && (mode == LookupMode::Path)
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn implements_trait(
|
||||
pub fn implements_trait(
|
||||
ty: &Canonical<Ty>,
|
||||
db: &impl HirDatabase,
|
||||
resolver: &Resolver,
|
||||
krate: Crate,
|
||||
trait_: Trait,
|
||||
krate: CrateId,
|
||||
trait_: TraitId,
|
||||
) -> bool {
|
||||
if ty.value.inherent_trait() == Some(trait_) {
|
||||
// FIXME this is a bit of a hack, since Chalk should say the same thing
|
||||
// anyway, but currently Chalk doesn't implement `dyn/impl Trait` yet
|
||||
return true;
|
||||
}
|
||||
let env = lower::trait_env(db, resolver);
|
||||
let env = TraitEnvironment::lower(db, resolver);
|
||||
let goal = generic_implements_goal(db, env, trait_, ty.clone());
|
||||
let solution = db.trait_solve(krate, goal);
|
||||
let solution = db.trait_solve(krate.into(), goal);
|
||||
|
||||
solution.is_some()
|
||||
}
|
||||
|
||||
impl Ty {
|
||||
// This would be nicer if it just returned an iterator, but that runs into
|
||||
// lifetime problems, because we need to borrow temp `CrateImplBlocks`.
|
||||
pub fn iterate_impl_items<T>(
|
||||
self,
|
||||
db: &impl HirDatabase,
|
||||
krate: Crate,
|
||||
mut callback: impl FnMut(AssocItem) -> Option<T>,
|
||||
) -> Option<T> {
|
||||
for krate in def_crates(db, krate, &self)? {
|
||||
let impls = db.impls_in_crate(krate);
|
||||
|
||||
for impl_block in impls.lookup_impl_blocks(&self) {
|
||||
for item in impl_block.items(db) {
|
||||
if let Some(result) = callback(item) {
|
||||
return Some(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// This creates Substs for a trait with the given Self type and type variables
|
||||
/// for all other parameters, to query Chalk with it.
|
||||
fn generic_implements_goal(
|
||||
db: &impl HirDatabase,
|
||||
env: Arc<TraitEnvironment>,
|
||||
trait_: Trait,
|
||||
trait_: TraitId,
|
||||
self_ty: Canonical<Ty>,
|
||||
) -> Canonical<InEnvironment<super::Obligation>> {
|
||||
let num_vars = self_ty.num_vars;
|
|
@ -1,10 +1,8 @@
|
|||
//! FIXME: write short doc here
|
||||
use hir_def::expr::{BinaryOp, CmpOp};
|
||||
|
||||
use super::{InferTy, Ty, TypeCtor};
|
||||
use crate::{
|
||||
expr::{BinaryOp, CmpOp},
|
||||
ty::ApplicationTy,
|
||||
};
|
||||
use crate::ApplicationTy;
|
||||
|
||||
pub(super) fn binary_op_return_ty(op: BinaryOp, rhs_ty: Ty) -> Ty {
|
||||
match op {
|
|
@ -1,8 +1,11 @@
|
|||
//! FIXME: write short doc here
|
||||
//! Defines primitive types, which have a couple of peculiarities:
|
||||
//!
|
||||
//! * during type inference, they can be uncertain (ie, `let x = 92;`)
|
||||
//! * they don't belong to any particular crate.
|
||||
|
||||
use std::fmt;
|
||||
|
||||
pub use hir_def::builtin_type::{FloatBitness, IntBitness, Signedness};
|
||||
pub use hir_def::builtin_type::{BuiltinFloat, BuiltinInt, FloatBitness, IntBitness, Signedness};
|
||||
|
||||
#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]
|
||||
pub enum Uncertain<T> {
|
||||
|
@ -107,7 +110,7 @@ impl IntTy {
|
|||
IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X128 }
|
||||
}
|
||||
|
||||
pub(crate) fn ty_to_string(self) -> &'static str {
|
||||
pub fn ty_to_string(self) -> &'static str {
|
||||
match (self.signedness, self.bitness) {
|
||||
(Signedness::Signed, IntBitness::Xsize) => "isize",
|
||||
(Signedness::Signed, IntBitness::X8) => "i8",
|
||||
|
@ -151,10 +154,40 @@ impl FloatTy {
|
|||
FloatTy { bitness: FloatBitness::X64 }
|
||||
}
|
||||
|
||||
pub(crate) fn ty_to_string(self) -> &'static str {
|
||||
pub fn ty_to_string(self) -> &'static str {
|
||||
match self.bitness {
|
||||
FloatBitness::X32 => "f32",
|
||||
FloatBitness::X64 => "f64",
|
||||
}
|
||||
}
|
||||
}

impl From<BuiltinInt> for IntTy {
fn from(t: BuiltinInt) -> Self {
IntTy { signedness: t.signedness, bitness: t.bitness }
}
}

impl From<BuiltinFloat> for FloatTy {
fn from(t: BuiltinFloat) -> Self {
FloatTy { bitness: t.bitness }
}
}

impl From<Option<BuiltinInt>> for Uncertain<IntTy> {
fn from(t: Option<BuiltinInt>) -> Self {
match t {
None => Uncertain::Unknown,
Some(t) => Uncertain::Known(t.into()),
}
}
}

impl From<Option<BuiltinFloat>> for Uncertain<FloatTy> {
fn from(t: Option<BuiltinFloat>) -> Self {
match t {
None => Uncertain::Unknown,
Some(t) => Uncertain::Known(t.into()),
}
}
}
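The `Uncertain` conversions above model integer and float literals whose exact type is not yet known during inference; a rough illustration in ordinary Rust (the comments describe the analyzer's view, not anything the compiler prints):

    fn uncertain_demo() {
        let x = 92;    // here the integer type is still Uncertain::Unknown
        let y: u8 = x; // this use resolves it to Uncertain::Known(u8)
        let _ = y;
    }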
@ -1,21 +1,23 @@
|
|||
//! Database used for testing `hir`.
|
||||
|
||||
use std::{panic, sync::Arc};
|
||||
use std::{
|
||||
panic,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use hir_def::{db::DefDatabase, ModuleId};
|
||||
use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId, ModuleId};
|
||||
use hir_expand::diagnostics::DiagnosticSink;
|
||||
use parking_lot::Mutex;
|
||||
use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, SourceDatabase};
|
||||
|
||||
use crate::{db, debug::HirDebugHelper};
|
||||
use crate::{db::HirDatabase, expr::ExprValidator};
|
||||
|
||||
#[salsa::database(
|
||||
ra_db::SourceDatabaseExtStorage,
|
||||
ra_db::SourceDatabaseStorage,
|
||||
db::InternDatabaseStorage,
|
||||
db::AstDatabaseStorage,
|
||||
db::DefDatabaseStorage,
|
||||
db::HirDatabaseStorage
|
||||
hir_expand::db::AstDatabaseStorage,
|
||||
hir_def::db::InternDatabaseStorage,
|
||||
hir_def::db::DefDatabaseStorage,
|
||||
crate::db::HirDatabaseStorage
|
||||
)]
|
||||
#[derive(Debug, Default)]
|
||||
pub struct TestDB {
|
||||
|
@ -28,8 +30,12 @@ impl salsa::Database for TestDB {
|
|||
&self.runtime
|
||||
}
|
||||
|
||||
fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
|
||||
&mut self.runtime
|
||||
}
|
||||
|
||||
fn salsa_event(&self, event: impl Fn() -> salsa::Event<TestDB>) {
|
||||
let mut events = self.events.lock();
|
||||
let mut events = self.events.lock().unwrap();
|
||||
if let Some(events) = &mut *events {
|
||||
events.push(event());
|
||||
}
|
||||
|
@ -63,32 +69,53 @@ impl FileLoader for TestDB {
|
|||
}
|
||||
}
|
||||
|
||||
// FIXME: improve `WithFixture` to bring useful hir debugging back
|
||||
impl HirDebugHelper for TestDB {
|
||||
fn crate_name(&self, _krate: CrateId) -> Option<String> {
|
||||
None
|
||||
}
|
||||
|
||||
fn file_path(&self, _file_id: FileId) -> Option<String> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl TestDB {
|
||||
pub fn module_for_file(&self, file_id: FileId) -> ModuleId {
|
||||
for &krate in self.relevant_crates(file_id).iter() {
|
||||
let crate_def_map = self.crate_def_map(krate);
|
||||
for (local_id, data) in crate_def_map.modules.iter() {
|
||||
if data.definition == Some(file_id) {
|
||||
return ModuleId { krate, local_id };
|
||||
}
|
||||
}
|
||||
}
|
||||
panic!("Can't find module for file")
|
||||
}
|
||||
|
||||
// FIXME: don't duplicate this
|
||||
pub fn diagnostics(&self) -> String {
|
||||
let mut buf = String::new();
|
||||
let crate_graph = self.crate_graph();
|
||||
for krate in crate_graph.iter().next() {
|
||||
let crate_def_map = self.crate_def_map(krate);
|
||||
|
||||
let mut fns = Vec::new();
|
||||
for (module_id, _) in crate_def_map.modules.iter() {
|
||||
let module_id = ModuleId { krate, module_id };
|
||||
let module = crate::Module::from(module_id);
|
||||
module.diagnostics(
|
||||
self,
|
||||
&mut DiagnosticSink::new(|d| {
|
||||
buf += &format!("{:?}: {}\n", d.syntax_node(self).text(), d.message());
|
||||
}),
|
||||
)
|
||||
for decl in crate_def_map[module_id].scope.declarations() {
|
||||
match decl {
|
||||
ModuleDefId::FunctionId(f) => fns.push(f),
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
for &impl_id in crate_def_map[module_id].impls.iter() {
|
||||
let impl_data = self.impl_data(impl_id);
|
||||
for item in impl_data.items.iter() {
|
||||
if let AssocItemId::FunctionId(f) = item {
|
||||
fns.push(*f)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for f in fns {
|
||||
let infer = self.infer(f.into());
|
||||
let mut sink = DiagnosticSink::new(|d| {
|
||||
buf += &format!("{:?}: {}\n", d.syntax_node(self).text(), d.message());
|
||||
});
|
||||
infer.add_diagnostics(self, f, &mut sink);
|
||||
let mut validator = ExprValidator::new(f, infer, &mut sink);
|
||||
validator.validate_body(self);
|
||||
}
|
||||
}
|
||||
buf
|
||||
|
@ -97,9 +124,9 @@ impl TestDB {
|
|||
|
||||
impl TestDB {
|
||||
pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event<TestDB>> {
|
||||
*self.events.lock() = Some(Vec::new());
|
||||
*self.events.lock().unwrap() = Some(Vec::new());
|
||||
f();
|
||||
self.events.lock().take().unwrap()
|
||||
self.events.lock().unwrap().take().unwrap()
|
||||
}
|
||||
|
||||
pub fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
|
|
@ -4,20 +4,20 @@ mod coercion;
|
|||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hir_def::{
|
||||
body::BodySourceMap, db::DefDatabase, nameres::CrateDefMap, AssocItemId, DefWithBodyId,
|
||||
LocalModuleId, Lookup, ModuleDefId,
|
||||
};
|
||||
use hir_expand::Source;
|
||||
use insta::assert_snapshot;
|
||||
use ra_db::{fixture::WithFixture, salsa::Database, FilePosition, SourceDatabase};
|
||||
use ra_syntax::{
|
||||
algo,
|
||||
ast::{self, AstNode},
|
||||
SyntaxKind::*,
|
||||
};
|
||||
use rustc_hash::FxHashSet;
|
||||
use test_utils::covers;
|
||||
|
||||
use crate::{
|
||||
expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult, Source,
|
||||
SourceAnalyzer,
|
||||
};
|
||||
use crate::{db::HirDatabase, display::HirDisplay, test_db::TestDB, InferenceResult};
|
||||
|
||||
// These tests compare the inference results for all expressions in a file
|
||||
// against snapshots of the expected results using insta. Use cargo-insta to
|
||||
|
@ -4674,10 +4674,20 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
|
|||
fn type_at_pos(db: &TestDB, pos: FilePosition) -> String {
|
||||
let file = db.parse(pos.file_id).ok().unwrap();
|
||||
let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
|
||||
let analyzer =
|
||||
SourceAnalyzer::new(db, Source::new(pos.file_id.into(), expr.syntax()), Some(pos.offset));
|
||||
let ty = analyzer.type_of(db, &expr).unwrap();
|
||||
ty.display(db).to_string()
|
||||
|
||||
let module = db.module_for_file(pos.file_id);
|
||||
let crate_def_map = db.crate_def_map(module.krate);
|
||||
for decl in crate_def_map[module.local_id].scope.declarations() {
|
||||
if let ModuleDefId::FunctionId(func) = decl {
|
||||
let (_body, source_map) = db.body_with_source_map(func.into());
|
||||
if let Some(expr_id) = source_map.node_expr(Source::new(pos.file_id.into(), &expr)) {
|
||||
let infer = db.infer(func.into());
|
||||
let ty = &infer[expr_id];
|
||||
return ty.display(db).to_string();
|
||||
}
|
||||
}
|
||||
}
|
||||
panic!("Can't find expression")
|
||||
}
|
||||
|
||||
fn type_at(content: &str) -> String {
|
||||
|
@ -4687,7 +4697,6 @@ fn type_at(content: &str) -> String {
|
|||
|
||||
fn infer(content: &str) -> String {
|
||||
let (db, file_id) = TestDB::with_single_file(content);
|
||||
let source_file = db.parse(file_id).ok().unwrap();
|
||||
|
||||
let mut acc = String::new();
|
||||
|
||||
|
@ -4740,20 +4749,69 @@ fn infer(content: &str) -> String {
|
|||
}
|
||||
};
|
||||
|
||||
let mut analyzed = FxHashSet::default();
|
||||
for node in source_file.syntax().descendants() {
|
||||
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
|
||||
let analyzer = SourceAnalyzer::new(&db, Source::new(file_id.into(), &node), None);
|
||||
if analyzed.insert(analyzer.analyzed_declaration()) {
|
||||
infer_def(analyzer.inference_result(), analyzer.body_source_map());
|
||||
}
|
||||
let module = db.module_for_file(file_id);
|
||||
let crate_def_map = db.crate_def_map(module.krate);
|
||||
|
||||
let mut defs: Vec<DefWithBodyId> = Vec::new();
|
||||
visit_module(&db, &crate_def_map, module.local_id, &mut |it| defs.push(it));
|
||||
defs.sort_by_key(|def| match def {
|
||||
DefWithBodyId::FunctionId(it) => {
|
||||
it.lookup(&db).ast_id.to_node(&db).syntax().text_range().start()
|
||||
}
|
||||
DefWithBodyId::ConstId(it) => {
|
||||
it.lookup(&db).ast_id.to_node(&db).syntax().text_range().start()
|
||||
}
|
||||
DefWithBodyId::StaticId(it) => {
|
||||
it.lookup(&db).ast_id.to_node(&db).syntax().text_range().start()
|
||||
}
|
||||
});
|
||||
for def in defs {
|
||||
let (_body, source_map) = db.body_with_source_map(def);
|
||||
let infer = db.infer(def);
|
||||
infer_def(infer, source_map);
|
||||
}
|
||||
|
||||
acc.truncate(acc.trim_end().len());
|
||||
acc
|
||||
}
|
||||
|
||||
fn visit_module(
|
||||
db: &TestDB,
|
||||
crate_def_map: &CrateDefMap,
|
||||
module_id: LocalModuleId,
|
||||
cb: &mut dyn FnMut(DefWithBodyId),
|
||||
) {
|
||||
for decl in crate_def_map[module_id].scope.declarations() {
|
||||
match decl {
|
||||
ModuleDefId::FunctionId(it) => cb(it.into()),
|
||||
ModuleDefId::ConstId(it) => cb(it.into()),
|
||||
ModuleDefId::StaticId(it) => cb(it.into()),
|
||||
ModuleDefId::TraitId(it) => {
|
||||
let trait_data = db.trait_data(it);
|
||||
for &(_, item) in trait_data.items.iter() {
|
||||
match item {
|
||||
AssocItemId::FunctionId(it) => cb(it.into()),
|
||||
AssocItemId::ConstId(it) => cb(it.into()),
|
||||
AssocItemId::TypeAliasId(_) => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.local_id, cb),
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
for &impl_id in crate_def_map[module_id].impls.iter() {
|
||||
let impl_data = db.impl_data(impl_id);
|
||||
for &item in impl_data.items.iter() {
|
||||
match item {
|
||||
AssocItemId::FunctionId(it) => cb(it.into()),
|
||||
AssocItemId::ConstId(it) => cb(it.into()),
|
||||
AssocItemId::TypeAliasId(_) => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn ellipsize(mut text: String, max_len: usize) -> String {
|
||||
if text.len() <= max_len {
|
||||
return text;
|
||||
|
@ -4783,10 +4841,12 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
|
|||
",
|
||||
);
|
||||
{
|
||||
let file = db.parse(pos.file_id).ok().unwrap();
|
||||
let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
|
||||
let events = db.log_executed(|| {
|
||||
SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
|
||||
let module = db.module_for_file(pos.file_id);
|
||||
let crate_def_map = db.crate_def_map(module.krate);
|
||||
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
|
||||
db.infer(def);
|
||||
});
|
||||
});
|
||||
assert!(format!("{:?}", events).contains("infer"))
|
||||
}
|
||||
|
@ -4803,10 +4863,12 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
|
|||
db.query_mut(ra_db::FileTextQuery).set(pos.file_id, Arc::new(new_text));
|
||||
|
||||
{
|
||||
let file = db.parse(pos.file_id).ok().unwrap();
|
||||
let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
|
||||
let events = db.log_executed(|| {
|
||||
SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
|
||||
let module = db.module_for_file(pos.file_id);
|
||||
let crate_def_map = db.crate_def_map(module.krate);
|
||||
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
|
||||
db.infer(def);
|
||||
});
|
||||
});
|
||||
assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
|
||||
}
|
||||
|
@ -4832,7 +4894,8 @@ fn no_such_field_diagnostics() {
|
|||
|
||||
assert_snapshot!(diagnostics, @r###"
|
||||
"baz: 62": no such field
|
||||
"{\n foo: 92,\n baz: 62,\n }": fill structure fields
|
||||
"{\n foo: 92,\n baz: 62,\n }": Missing structure fields:
|
||||
- bar
|
||||
"###
|
||||
);
|
||||
}
|
|
@ -2,13 +2,15 @@
|
|||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use chalk_ir::{cast::Cast, family::ChalkIr};
|
||||
use hir_def::{expr::ExprId, DefWithBodyId, ImplId, TraitId, TypeAliasId};
|
||||
use log::debug;
|
||||
use ra_db::{impl_intern_key, salsa};
|
||||
use ra_db::{impl_intern_key, salsa, CrateId};
|
||||
use ra_prof::profile;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::db::HirDatabase;
|
||||
|
||||
use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk};
|
||||
use crate::{db::HirDatabase, expr::ExprId, Crate, DefWithBody, ImplBlock, Trait, TypeAlias};
|
||||
|
||||
use self::chalk::{from_chalk, ToChalk};
|
||||
|
||||
|
@ -16,7 +18,7 @@ pub(crate) mod chalk;
|
|||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct TraitSolver {
|
||||
krate: Crate,
|
||||
krate: CrateId,
|
||||
inner: Arc<Mutex<chalk_solve::Solver<ChalkIr>>>,
|
||||
}
|
||||
|
||||
|
@ -58,12 +60,12 @@ const CHALK_SOLVER_MAX_SIZE: usize = 4;
|
|||
#[derive(Debug, Copy, Clone)]
|
||||
struct ChalkContext<'a, DB> {
|
||||
db: &'a DB,
|
||||
krate: Crate,
|
||||
krate: CrateId,
|
||||
}
|
||||
|
||||
pub(crate) fn trait_solver_query(
|
||||
db: &(impl HirDatabase + salsa::Database),
|
||||
krate: Crate,
|
||||
krate: CrateId,
|
||||
) -> TraitSolver {
|
||||
db.salsa_runtime().report_untracked_read();
|
||||
// krate parameter is just so we cache a unique solver per crate
|
||||
|
@ -75,17 +77,17 @@ pub(crate) fn trait_solver_query(
|
|||
/// Collects impls for the given trait in the whole dependency tree of `krate`.
|
||||
pub(crate) fn impls_for_trait_query(
|
||||
db: &impl HirDatabase,
|
||||
krate: Crate,
|
||||
trait_: Trait,
|
||||
) -> Arc<[ImplBlock]> {
|
||||
krate: CrateId,
|
||||
trait_: TraitId,
|
||||
) -> Arc<[ImplId]> {
|
||||
let mut impls = FxHashSet::default();
|
||||
// We call the query recursively here. On the one hand, this means we can
|
||||
// reuse results from queries for different crates; on the other hand, this
|
||||
// will only ever get called for a few crates near the root of the tree (the
|
||||
// ones the user is editing), so this may actually be a waste of memory. I'm
|
||||
// doing it like this mainly for simplicity for now.
|
||||
for dep in krate.dependencies(db) {
|
||||
impls.extend(db.impls_for_trait(dep.krate, trait_).iter());
|
||||
for dep in db.crate_graph().dependencies(krate) {
|
||||
impls.extend(db.impls_for_trait(dep.crate_id, trait_).iter());
|
||||
}
|
||||
let crate_impl_blocks = db.impls_in_crate(krate);
|
||||
impls.extend(crate_impl_blocks.lookup_impl_blocks_for_trait(trait_));
|
||||
|
@ -174,7 +176,7 @@ impl TypeWalk for ProjectionPredicate {
|
|||
/// Solve a trait goal using Chalk.
|
||||
pub(crate) fn trait_solve_query(
|
||||
db: &impl HirDatabase,
|
||||
krate: Crate,
|
||||
krate: CrateId,
|
||||
goal: Canonical<InEnvironment<Obligation>>,
|
||||
) -> Option<Solution> {
|
||||
let _p = profile("trait_solve_query");
|
||||
|
@ -290,7 +292,7 @@ impl FnTrait {
|
|||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct ClosureFnTraitImplData {
|
||||
def: DefWithBody,
|
||||
def: DefWithBodyId,
|
||||
expr: ExprId,
|
||||
fn_trait: FnTrait,
|
||||
}
|
||||
|
@ -300,7 +302,7 @@ pub struct ClosureFnTraitImplData {
|
|||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum Impl {
|
||||
/// A normal impl from an impl block.
|
||||
ImplBlock(ImplBlock),
|
||||
ImplBlock(ImplId),
|
||||
/// Closure types implement the Fn traits synthetically.
|
||||
ClosureFnTraitImpl(ClosureFnTraitImplData),
|
||||
}
|
||||
|
@ -315,7 +317,7 @@ impl_intern_key!(GlobalImplId);
|
|||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum AssocTyValue {
|
||||
/// A normal assoc type value from an impl block.
|
||||
TypeAlias(TypeAlias),
|
||||
TypeAlias(TypeAliasId),
|
||||
/// The output type of the Fn trait implementation.
|
||||
ClosureFnTraitImplOutput(ClosureFnTraitImplData),
|
||||
}
|
|
@ -4,22 +4,24 @@ use std::sync::Arc;
|
|||
use log::debug;
|
||||
|
||||
use chalk_ir::{
|
||||
cast::Cast, family::ChalkIr, Identifier, ImplId, Parameter, PlaceholderIndex, TypeId,
|
||||
TypeKindId, TypeName, UniverseIndex,
|
||||
cast::Cast, family::ChalkIr, Identifier, Parameter, PlaceholderIndex, TypeId, TypeKindId,
|
||||
TypeName, UniverseIndex,
|
||||
};
|
||||
use chalk_rust_ir::{AssociatedTyDatum, AssociatedTyValue, ImplDatum, StructDatum, TraitDatum};
|
||||
use ra_db::CrateId;
|
||||
|
||||
use hir_def::lang_item::LangItemTarget;
|
||||
use hir_def::{
|
||||
expr::Expr, lang_item::LangItemTarget, AssocItemId, AstItemDef, ContainerId, GenericDefId,
|
||||
ImplId, Lookup, TraitId, TypeAliasId,
|
||||
};
|
||||
use hir_expand::name;
|
||||
|
||||
use ra_db::salsa::{InternId, InternKey};
|
||||
|
||||
use super::{AssocTyValue, Canonical, ChalkContext, Impl, Obligation};
|
||||
use crate::{
|
||||
db::HirDatabase,
|
||||
ty::display::HirDisplay,
|
||||
ty::{ApplicationTy, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk},
|
||||
Crate, GenericDef, ImplBlock, Trait, TypeAlias,
|
||||
db::HirDatabase, display::HirDisplay, ApplicationTy, GenericPredicate, ImplTy, ProjectionTy,
|
||||
Substs, TraitRef, Ty, TypeCtor, TypeWalk,
|
||||
};
|
||||
|
||||
/// This represents a trait whose name we could not resolve.
|
||||
|
@ -167,15 +169,15 @@ impl ToChalk for TraitRef {
|
|||
}
|
||||
}
|
||||
|
||||
impl ToChalk for Trait {
|
||||
impl ToChalk for TraitId {
|
||||
type Chalk = chalk_ir::TraitId;
|
||||
|
||||
fn to_chalk(self, _db: &impl HirDatabase) -> chalk_ir::TraitId {
|
||||
chalk_ir::TraitId(id_to_chalk(self.id))
|
||||
chalk_ir::TraitId(id_to_chalk(self))
|
||||
}
|
||||
|
||||
fn from_chalk(_db: &impl HirDatabase, trait_id: chalk_ir::TraitId) -> Trait {
|
||||
Trait { id: id_from_chalk(trait_id.0) }
|
||||
fn from_chalk(_db: &impl HirDatabase, trait_id: chalk_ir::TraitId) -> TraitId {
|
||||
id_from_chalk(trait_id.0)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -203,15 +205,15 @@ impl ToChalk for Impl {
|
|||
}
|
||||
}
|
||||
|
||||
impl ToChalk for TypeAlias {
|
||||
impl ToChalk for TypeAliasId {
|
||||
type Chalk = chalk_ir::TypeId;
|
||||
|
||||
fn to_chalk(self, _db: &impl HirDatabase) -> chalk_ir::TypeId {
|
||||
chalk_ir::TypeId(id_to_chalk(self.id))
|
||||
chalk_ir::TypeId(id_to_chalk(self))
|
||||
}
|
||||
|
||||
fn from_chalk(_db: &impl HirDatabase, type_alias_id: chalk_ir::TypeId) -> TypeAlias {
|
||||
TypeAlias { id: id_from_chalk(type_alias_id.0) }
|
||||
fn from_chalk(_db: &impl HirDatabase, type_alias_id: chalk_ir::TypeId) -> TypeAliasId {
|
||||
id_from_chalk(type_alias_id.0)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -402,7 +404,7 @@ fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> {
|
|||
|
||||
fn convert_where_clauses(
|
||||
db: &impl HirDatabase,
|
||||
def: GenericDef,
|
||||
def: GenericDefId,
|
||||
substs: &Substs,
|
||||
) -> Vec<chalk_ir::QuantifiedWhereClause<ChalkIr>> {
|
||||
let generic_predicates = db.generic_predicates(def);
|
||||
|
@ -431,25 +433,25 @@ where
|
|||
fn struct_datum(&self, struct_id: chalk_ir::StructId) -> Arc<StructDatum<ChalkIr>> {
|
||||
self.db.struct_datum(self.krate, struct_id)
|
||||
}
|
||||
fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum<ChalkIr>> {
|
||||
fn impl_datum(&self, impl_id: chalk_ir::ImplId) -> Arc<ImplDatum<ChalkIr>> {
|
||||
self.db.impl_datum(self.krate, impl_id)
|
||||
}
|
||||
fn impls_for_trait(
|
||||
&self,
|
||||
trait_id: chalk_ir::TraitId,
|
||||
parameters: &[Parameter<ChalkIr>],
|
||||
) -> Vec<ImplId> {
|
||||
) -> Vec<chalk_ir::ImplId> {
|
||||
debug!("impls_for_trait {:?}", trait_id);
|
||||
if trait_id == UNKNOWN_TRAIT {
|
||||
return Vec::new();
|
||||
}
|
||||
let trait_: Trait = from_chalk(self.db, trait_id);
|
||||
let trait_: TraitId = from_chalk(self.db, trait_id);
|
||||
let mut result: Vec<_> = self
|
||||
.db
|
||||
.impls_for_trait(self.krate, trait_)
|
||||
.impls_for_trait(self.krate, trait_.into())
|
||||
.iter()
|
||||
.copied()
|
||||
.map(Impl::ImplBlock)
|
||||
.map(|it| Impl::ImplBlock(it.into()))
|
||||
.map(|impl_| impl_.to_chalk(self.db))
|
||||
.collect();
|
||||
|
||||
|
@ -485,7 +487,7 @@ where
|
|||
&self,
|
||||
id: chalk_rust_ir::AssociatedTyValueId,
|
||||
) -> Arc<AssociatedTyValue<ChalkIr>> {
|
||||
self.db.associated_ty_value(self.krate, id)
|
||||
self.db.associated_ty_value(self.krate.into(), id)
|
||||
}
|
||||
fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<ChalkIr>> {
|
||||
vec![]
|
||||
|
@ -504,12 +506,12 @@ pub(crate) fn associated_ty_data_query(
|
|||
id: TypeId,
|
||||
) -> Arc<AssociatedTyDatum<ChalkIr>> {
|
||||
debug!("associated_ty_data {:?}", id);
|
||||
let type_alias: TypeAlias = from_chalk(db, id);
|
||||
let trait_ = match type_alias.container(db) {
|
||||
Some(crate::Container::Trait(t)) => t,
|
||||
let type_alias: TypeAliasId = from_chalk(db, id);
|
||||
let trait_ = match type_alias.lookup(db).container {
|
||||
ContainerId::TraitId(t) => t,
|
||||
_ => panic!("associated type not in trait"),
|
||||
};
|
||||
let generic_params = db.generic_params(type_alias.id.into());
|
||||
let generic_params = db.generic_params(type_alias.into());
|
||||
let bound_data = chalk_rust_ir::AssociatedTyDatumBound {
|
||||
// FIXME add bounds and where clauses
|
||||
bounds: vec![],
|
||||
|
@ -518,7 +520,7 @@ pub(crate) fn associated_ty_data_query(
|
|||
let datum = AssociatedTyDatum {
|
||||
trait_id: trait_.to_chalk(db),
|
||||
id,
|
||||
name: lalrpop_intern::intern(&type_alias.name(db).to_string()),
|
||||
name: lalrpop_intern::intern(&db.type_alias_data(type_alias).name.to_string()),
|
||||
binders: make_binders(bound_data, generic_params.count_params_including_parent()),
|
||||
};
|
||||
Arc::new(datum)
|
||||
|
@@ -526,7 +528,7 @@ pub(crate) fn associated_ty_data_query(
pub(crate) fn trait_datum_query(
db: &impl HirDatabase,
krate: Crate,
krate: CrateId,
trait_id: chalk_ir::TraitId,
) -> Arc<TraitDatum<ChalkIr>> {
debug!("trait_datum {:?}", trait_id);
@@ -548,13 +550,14 @@ pub(crate) fn trait_datum_query(
associated_ty_ids: vec![],
});
}
let trait_: Trait = from_chalk(db, trait_id);
debug!("trait {:?} = {:?}", trait_id, trait_.name(db));
let generic_params = db.generic_params(trait_.id.into());
let trait_: TraitId = from_chalk(db, trait_id);
let trait_data = db.trait_data(trait_);
debug!("trait {:?} = {:?}", trait_id, trait_data.name);
let generic_params = db.generic_params(trait_.into());
let bound_vars = Substs::bound_vars(&generic_params);
let flags = chalk_rust_ir::TraitFlags {
auto: trait_.is_auto(db),
upstream: trait_.module(db).krate() != krate,
auto: trait_data.auto,
upstream: trait_.module(db).krate != krate,
non_enumerable: true,
coinductive: false, // only relevant for Chalk testing
// FIXME set these flags correctly
@@ -562,15 +565,8 @@ pub(crate) fn trait_datum_query(
fundamental: false,
};
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
let associated_ty_ids = trait_
.items(db)
.into_iter()
.filter_map(|trait_item| match trait_item {
crate::AssocItem::TypeAlias(type_alias) => Some(type_alias),
_ => None,
})
.map(|type_alias| type_alias.to_chalk(db))
.collect();
let associated_ty_ids =
trait_data.associated_types().map(|type_alias| type_alias.to_chalk(db)).collect();
let trait_datum_bound = chalk_rust_ir::TraitDatumBound { where_clauses };
let trait_datum = TraitDatum {
id: trait_id,
@@ -583,7 +579,7 @@ pub(crate) fn trait_datum_query(
pub(crate) fn struct_datum_query(
db: &impl HirDatabase,
krate: Crate,
krate: CrateId,
struct_id: chalk_ir::StructId,
) -> Arc<StructDatum<ChalkIr>> {
debug!("struct_datum {:?}", struct_id);
@@ -615,8 +611,8 @@ pub(crate) fn struct_datum_query(
pub(crate) fn impl_datum_query(
db: &impl HirDatabase,
krate: Crate,
impl_id: ImplId,
krate: CrateId,
impl_id: chalk_ir::ImplId,
) -> Arc<ImplDatum<ChalkIr>> {
let _p = ra_prof::profile("impl_datum");
debug!("impl_datum {:?}", impl_id);
@@ -630,24 +626,30 @@ pub(crate) fn impl_datum_query(
fn impl_block_datum(
db: &impl HirDatabase,
krate: Crate,
krate: CrateId,
chalk_id: chalk_ir::ImplId,
impl_id: ImplId,
impl_block: ImplBlock,
) -> Option<Arc<ImplDatum<ChalkIr>>> {
let generic_params = db.generic_params(impl_block.id.into());
let trait_ref = match db.impl_ty(impl_id) {
ImplTy::TraitRef(it) => it,
ImplTy::Inherent(_) => return None,
};
let impl_data = db.impl_data(impl_id);
let generic_params = db.generic_params(impl_id.into());
let bound_vars = Substs::bound_vars(&generic_params);
let trait_ref = impl_block.target_trait_ref(db)?.subst(&bound_vars);
let trait_ref = trait_ref.subst(&bound_vars);
let trait_ = trait_ref.trait_;
let impl_type = if impl_block.krate(db) == krate {
let impl_type = if impl_id.module(db).krate == krate {
chalk_rust_ir::ImplType::Local
} else {
chalk_rust_ir::ImplType::External
};
let where_clauses = convert_where_clauses(db, impl_block.into(), &bound_vars);
let negative = impl_block.is_negative(db);
let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
let negative = impl_data.is_negative;
debug!(
"impl {:?}: {}{} where {:?}",
impl_id,
chalk_id,
if negative { "!" } else { "" },
trait_ref.display(db),
where_clauses
@@ -661,16 +663,18 @@ fn impl_block_datum(
};
let impl_datum_bound = chalk_rust_ir::ImplDatumBound { trait_ref, where_clauses };
let associated_ty_value_ids = impl_block
.items(db)
.into_iter()
let trait_data = db.trait_data(trait_);
let associated_ty_value_ids = impl_data
.items
.iter()
.filter_map(|item| match item {
crate::AssocItem::TypeAlias(type_alias) => Some(type_alias),
AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
_ => None,
})
.filter(|type_alias| {
.filter(|&type_alias| {
// don't include associated types that don't exist in the trait
trait_.associated_type_by_name(db, &type_alias.name(db)).is_some()
let name = &db.type_alias_data(type_alias).name;
trait_data.associated_type_by_name(name).is_some()
})
.map(|type_alias| AssocTyValue::TypeAlias(type_alias).to_chalk(db))
.collect();
@@ -701,7 +705,7 @@ fn invalid_impl_datum() -> Arc<ImplDatum<ChalkIr>> {
fn closure_fn_trait_impl_datum(
db: &impl HirDatabase,
krate: Crate,
krate: CrateId,
data: super::ClosureFnTraitImplData,
) -> Option<Arc<ImplDatum<ChalkIr>>> {
// for some closure |X, Y| -> Z:
@@ -713,10 +717,10 @@ fn closure_fn_trait_impl_datum(
// and don't want to return a valid value only to find out later that FnOnce
// is broken
let fn_once_trait = get_fn_trait(db, krate, super::FnTrait::FnOnce)?;
fn_once_trait.associated_type_by_name(db, &name::OUTPUT_TYPE)?;
let _output = db.trait_data(fn_once_trait).associated_type_by_name(&name::OUTPUT_TYPE)?;
let num_args: u16 = match &db.body(data.def.into())[data.expr] {
crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
Expr::Lambda { args, .. } => args.len() as u16,
_ => {
log::warn!("closure for closure type {:?} not found", data);
0
@@ -735,7 +739,7 @@ fn closure_fn_trait_impl_datum(
let self_ty = Ty::apply_one(TypeCtor::Closure { def: data.def, expr: data.expr }, sig_ty);
let trait_ref = TraitRef {
trait_,
trait_: trait_.into(),
substs: Substs::build_for_def(db, trait_).push(self_ty).push(arg_ty).build(),
};
@@ -758,7 +762,7 @@ fn closure_fn_trait_impl_datum(
pub(crate) fn associated_ty_value_query(
db: &impl HirDatabase,
krate: Crate,
krate: CrateId,
id: chalk_rust_ir::AssociatedTyValueId,
) -> Arc<chalk_rust_ir::AssociatedTyValue<ChalkIr>> {
let data: AssocTyValue = from_chalk(db, id);
@@ -774,24 +778,31 @@ pub(crate) fn associated_ty_value_query(
fn type_alias_associated_ty_value(
db: &impl HirDatabase,
_krate: Crate,
type_alias: TypeAlias,
_krate: CrateId,
type_alias: TypeAliasId,
) -> Arc<AssociatedTyValue<ChalkIr>> {
let impl_block = type_alias.impl_block(db).expect("assoc ty value should be in impl");
let impl_id = Impl::ImplBlock(impl_block).to_chalk(db);
let trait_ = impl_block
.target_trait_ref(db)
.expect("assoc ty value should not exist") // we don't return any assoc ty values if the impl'd trait can't be resolved
.trait_;
let assoc_ty = trait_
.associated_type_by_name(db, &type_alias.name(db))
let type_alias_data = db.type_alias_data(type_alias);
let impl_id = match type_alias.lookup(db).container {
ContainerId::ImplId(it) => it,
_ => panic!("assoc ty value should be in impl"),
};
let trait_ref = match db.impl_ty(impl_id) {
ImplTy::TraitRef(it) => it,
// we don't return any assoc ty values if the impl'd trait can't be resolved
ImplTy::Inherent(_) => panic!("assoc ty value should not exist"),
};
let assoc_ty = db
.trait_data(trait_ref.trait_)
.associated_type_by_name(&type_alias_data.name)
.expect("assoc ty value should not exist"); // validated when building the impl data as well
let generic_params = db.generic_params(impl_block.id.into());
let generic_params = db.generic_params(impl_id.into());
let bound_vars = Substs::bound_vars(&generic_params);
let ty = db.type_for_def(type_alias.into(), crate::ty::Namespace::Types).subst(&bound_vars);
let ty = db.ty(type_alias.into()).subst(&bound_vars);
let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: ty.to_chalk(db) };
let value = chalk_rust_ir::AssociatedTyValue {
impl_id,
impl_id: Impl::ImplBlock(impl_id.into()).to_chalk(db),
associated_ty_id: assoc_ty.to_chalk(db),
value: make_binders(value_bound, bound_vars.len()),
};
@@ -800,13 +811,13 @@ fn type_alias_associated_ty_value(
fn closure_fn_trait_output_assoc_ty_value(
db: &impl HirDatabase,
krate: Crate,
krate: CrateId,
data: super::ClosureFnTraitImplData,
) -> Arc<AssociatedTyValue<ChalkIr>> {
let impl_id = Impl::ClosureFnTraitImpl(data.clone()).to_chalk(db);
let num_args: u16 = match &db.body(data.def.into())[data.expr] {
crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
Expr::Lambda { args, .. } => args.len() as u16,
_ => {
log::warn!("closure for closure type {:?} not found", data);
0
@@ -818,8 +829,9 @@ fn closure_fn_trait_output_assoc_ty_value(
let fn_once_trait =
get_fn_trait(db, krate, super::FnTrait::FnOnce).expect("assoc ty value should not exist");
let output_ty_id = fn_once_trait
.associated_type_by_name(db, &name::OUTPUT_TYPE)
let output_ty_id = db
.trait_data(fn_once_trait)
.associated_type_by_name(&name::OUTPUT_TYPE)
.expect("assoc ty value should not exist");
let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: output_ty.to_chalk(db) };
@@ -832,10 +844,14 @@ fn closure_fn_trait_output_assoc_ty_value(
Arc::new(value)
}
fn get_fn_trait(db: &impl HirDatabase, krate: Crate, fn_trait: super::FnTrait) -> Option<Trait> {
let target = db.lang_item(krate.crate_id, fn_trait.lang_item_name().into())?;
fn get_fn_trait(
db: &impl HirDatabase,
krate: CrateId,
fn_trait: super::FnTrait,
) -> Option<TraitId> {
let target = db.lang_item(krate, fn_trait.lang_item_name().into())?;
match target {
LangItemTarget::TraitId(t) => Some(t.into()),
LangItemTarget::TraitId(t) => Some(t),
_ => None,
}
}
@@ -847,38 +863,38 @@ fn id_to_chalk<T: InternKey>(salsa_id: T) -> chalk_ir::RawId {
chalk_ir::RawId { index: salsa_id.as_intern_id().as_u32() }
}
impl From<chalk_ir::StructId> for crate::ty::TypeCtorId {
impl From<chalk_ir::StructId> for crate::TypeCtorId {
fn from(struct_id: chalk_ir::StructId) -> Self {
id_from_chalk(struct_id.0)
}
}
impl From<crate::ty::TypeCtorId> for chalk_ir::StructId {
fn from(type_ctor_id: crate::ty::TypeCtorId) -> Self {
impl From<crate::TypeCtorId> for chalk_ir::StructId {
fn from(type_ctor_id: crate::TypeCtorId) -> Self {
chalk_ir::StructId(id_to_chalk(type_ctor_id))
}
}
impl From<chalk_ir::ImplId> for crate::ty::traits::GlobalImplId {
impl From<chalk_ir::ImplId> for crate::traits::GlobalImplId {
fn from(impl_id: chalk_ir::ImplId) -> Self {
id_from_chalk(impl_id.0)
}
}
impl From<crate::ty::traits::GlobalImplId> for chalk_ir::ImplId {
fn from(impl_id: crate::ty::traits::GlobalImplId) -> Self {
impl From<crate::traits::GlobalImplId> for chalk_ir::ImplId {
fn from(impl_id: crate::traits::GlobalImplId) -> Self {
chalk_ir::ImplId(id_to_chalk(impl_id))
}
}
impl From<chalk_rust_ir::AssociatedTyValueId> for crate::ty::traits::AssocTyValueId {
impl From<chalk_rust_ir::AssociatedTyValueId> for crate::traits::AssocTyValueId {
fn from(id: chalk_rust_ir::AssociatedTyValueId) -> Self {
id_from_chalk(id.0)
}
}
impl From<crate::ty::traits::AssocTyValueId> for chalk_rust_ir::AssociatedTyValueId {
fn from(assoc_ty_value_id: crate::ty::traits::AssocTyValueId) -> Self {
impl From<crate::traits::AssocTyValueId> for chalk_rust_ir::AssociatedTyValueId {
fn from(assoc_ty_value_id: crate::traits::AssocTyValueId) -> Self {
chalk_rust_ir::AssociatedTyValueId(id_to_chalk(assoc_ty_value_id))
}
}
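As an aside, the `From` impls above just shuttle interned salsa ids in and out of chalk's raw id wrappers via `id_to_chalk`/`id_from_chalk`, so the conversion is a lossless round trip. A minimal illustrative sketch, not part of this diff, assuming some already-interned `type_ctor_id: crate::TypeCtorId` that implements `PartialEq` and `Debug`:

    // salsa intern id -> chalk StructId -> back to the same salsa intern id;
    // both directions go through the underlying RawId index, so nothing is lost.
    let chalk_id: chalk_ir::StructId = type_ctor_id.into();
    let round_tripped: crate::TypeCtorId = chalk_id.into();
    assert_eq!(type_ctor_id, round_tripped);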
84  crates/ra_hir_ty/src/utils.rs  Normal file
@@ -0,0 +1,84 @@
//! Helper functions for working with def, which don't need to be a separate
//! query, but can't be computed directly from `*Data` (ie, which need a `db`).
use std::sync::Arc;

use hir_def::{
    adt::VariantData,
    db::DefDatabase,
    resolver::{HasResolver, TypeNs},
    type_ref::TypeRef,
    TraitId, TypeAliasId, VariantId,
};
use hir_expand::name::{self, Name};

// FIXME: this is wrong, b/c it can't express `trait T: PartialEq<()>`.
// We should return a `TraitREf` here.
fn direct_super_traits(db: &impl DefDatabase, trait_: TraitId) -> Vec<TraitId> {
    let resolver = trait_.resolver(db);
    // returning the iterator directly doesn't easily work because of
    // lifetime problems, but since there usually shouldn't be more than a
    // few direct traits this should be fine (we could even use some kind of
    // SmallVec if performance is a concern)
    db.generic_params(trait_.into())
        .where_predicates
        .iter()
        .filter_map(|pred| match &pred.type_ref {
            TypeRef::Path(p) if p.as_ident() == Some(&name::SELF_TYPE) => pred.bound.as_path(),
            _ => None,
        })
        .filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path) {
            Some(TypeNs::TraitId(t)) => Some(t),
            _ => None,
        })
        .collect()
}

/// Returns an iterator over the whole super trait hierarchy (including the
/// trait itself).
pub(super) fn all_super_traits(db: &impl DefDatabase, trait_: TraitId) -> Vec<TraitId> {
    // we need to take care a bit here to avoid infinite loops in case of cycles
    // (i.e. if we have `trait A: B; trait B: A;`)
    let mut result = vec![trait_];
    let mut i = 0;
    while i < result.len() {
        let t = result[i];
        // yeah this is quadratic, but trait hierarchies should be flat
        // enough that this doesn't matter
        for tt in direct_super_traits(db, t) {
            if !result.contains(&tt) {
                result.push(tt);
            }
        }
        i += 1;
    }
    result
}

pub(super) fn associated_type_by_name_including_super_traits(
    db: &impl DefDatabase,
    trait_: TraitId,
    name: &Name,
) -> Option<TypeAliasId> {
    all_super_traits(db, trait_)
        .into_iter()
        .find_map(|t| db.trait_data(t).associated_type_by_name(name))
}

pub(super) fn variant_data(db: &impl DefDatabase, var: VariantId) -> Arc<VariantData> {
    match var {
        VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
        VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
        VariantId::EnumVariantId(it) => {
            db.enum_data(it.parent).variants[it.local_id].variant_data.clone()
        }
    }
}

/// Helper for mutating `Arc<[T]>` (i.e. `Arc::make_mut` for Arc slices).
/// The underlying values are cloned if there are other strong references.
pub(crate) fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] {
    if Arc::get_mut(a).is_none() {
        *a = a.iter().cloned().collect();
    }
    Arc::get_mut(a).unwrap()
}
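As a rough usage sketch (not part of the commit), `make_mut_slice` gives clone-on-write access to an `Arc<[T]>` from inside the crate: if the slice has other strong references, the contents are copied before the mutable borrow is handed out, so other handles keep seeing the old data. Assuming `use std::sync::Arc;` and the helper above:

    // Two handles to the same Arc<[i32]>; mutating through one must not be
    // visible through the other, so make_mut_slice first clones the contents.
    let mut a: Arc<[i32]> = Arc::from(vec![1, 2, 3]);
    let b = Arc::clone(&a);
    make_mut_slice(&mut a)[0] = 42; // `a` now points at a fresh copy
    assert_eq!(a[0], 42);
    assert_eq!(b[0], 1); // the shared original is untouched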
@@ -1,6 +1,6 @@
[package]
edition = "2018"
name = "ra_ide_api"
name = "ra_ide"
version = "0.1.0"
authors = ["rust-analyzer developers"]
@@ -32,7 +32,7 @@ ra_prof = { path = "../ra_prof" }
test_utils = { path = "../test_utils" }
ra_assists = { path = "../ra_assists" }
# ra_ide_api should depend only on the top-level `hir` package. if you need
# ra_ide should depend only on the top-level `hir` package. if you need
# something from some `hir_xxx` subpackage, reexport the API via `hir`.
hir = { path = "../ra_hir", package = "ra_hir" }
@@ -18,22 +18,22 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
// Find the calling expression and it's NameRef
let calling_node = FnCallNode::with_node(&syntax, position.offset)?;
let name_ref = calling_node.name_ref()?;
let name_ref = hir::Source::new(position.file_id.into(), name_ref.syntax());
let analyzer = hir::SourceAnalyzer::new(
db,
hir::Source::new(position.file_id.into(), name_ref.syntax()),
None,
);
let analyzer = hir::SourceAnalyzer::new(db, name_ref, None);
let (mut call_info, has_self) = match &calling_node {
FnCallNode::CallExpr(expr) => {
//FIXME: apply subst
let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
//FIXME: Type::as_callable is broken
let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
match callable_def {
hir::CallableDef::Function(it) => {
(CallInfo::with_fn(db, it), it.has_self_param(db))
hir::CallableDef::FunctionId(it) => {
let fn_def = it.into();
(CallInfo::with_fn(db, fn_def), fn_def.has_self_param(db))
}
hir::CallableDef::StructId(it) => (CallInfo::with_struct(db, it.into())?, false),
hir::CallableDef::EnumVariantId(it) => {
(CallInfo::with_enum_variant(db, it.into())?, false)
}
hir::CallableDef::Struct(it) => (CallInfo::with_struct(db, it)?, false),
hir::CallableDef::EnumVariant(it) => (CallInfo::with_enum_variant(db, it)?, false),
}
}
FnCallNode::MethodCallExpr(expr) => {
@@ -41,7 +41,7 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
(CallInfo::with_fn(db, function), function.has_self_param(db))
}
FnCallNode::MacroCallExpr(expr) => {
let macro_def = analyzer.resolve_macro_call(db, &expr)?;
let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?;
(CallInfo::with_macro(db, macro_def)?, false)
}
};
@@ -171,7 +171,7 @@ impl RootDatabase {
log::info!("apply_change {:?}", change);
{
let _p = profile("RootDatabase::apply_change/cancellation");
self.salsa_runtime().synthetic_write(Durability::LOW);
self.salsa_runtime_mut().synthetic_write(Durability::LOW);
}
if !change.new_roots.is_empty() {
let mut local_roots = Vec::clone(&self.local_roots());
@@ -323,7 +323,8 @@ impl RootDatabase {
hir::db::DocumentationQuery
hir::db::ExprScopesQuery
hir::db::InferQuery
hir::db::TypeForDefQuery
hir::db::TyQuery
hir::db::ValueTyQuery
hir::db::FieldTypesQuery
hir::db::CallableItemSignatureQuery
hir::db::GenericPredicatesQuery
@@ -1,6 +1,6 @@
//! FIXME: write short doc here
use hir::{Adt, Ty, TypeCtor};
use hir::Type;
use crate::completion::completion_item::CompletionKind;
use crate::{
@@ -22,12 +22,12 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
};
if !ctx.is_call {
complete_fields(acc, ctx, receiver_ty.clone());
complete_fields(acc, ctx, &receiver_ty);
}
complete_methods(acc, ctx, receiver_ty.clone());
complete_methods(acc, ctx, &receiver_ty);
// Suggest .await syntax for types that implement Future trait
if ctx.analyzer.impls_future(ctx.db, receiver_ty) {
if ctx.analyzer.impls_future(ctx.db, receiver_ty.into_ty()) {
CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), "await")
.detail("expr.await")
.insert_text("await")
@@ -35,28 +35,18 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
}
}
fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) {
for receiver in ctx.analyzer.autoderef(ctx.db, receiver) {
if let Ty::Apply(a_ty) = receiver {
match a_ty.ctor {
TypeCtor::Adt(Adt::Struct(s)) => {
for field in s.fields(ctx.db) {
acc.add_field(ctx, field, &a_ty.parameters);
}
}
// FIXME unions
TypeCtor::Tuple { .. } => {
for (i, ty) in a_ty.parameters.iter().enumerate() {
acc.add_tuple_field(ctx, i, ty);
}
}
_ => {}
}
};
fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) {
for receiver in receiver.autoderef(ctx.db) {
for (field, ty) in receiver.fields(ctx.db) {
acc.add_field(ctx, field, &ty);
}
for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() {
acc.add_tuple_field(ctx, i, &ty);
}
}
}
fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) {
fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) {
let mut seen_methods = FxHashSet::default();
ctx.analyzer.iterate_method_candidates(ctx.db, receiver, None, |_ty, func| {
if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) {
@@ -50,7 +50,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db),
_ => unreachable!(),
};
ctx.analyzer.iterate_path_candidates(ctx.db, ty.clone(), None, |_ty, item| {
ctx.analyzer.iterate_path_candidates(ctx.db, &ty, None, |_ty, item| {
match item {
hir::AssocItem::Function(func) => {
if !func.has_self_param(ctx.db) {
@@ -1,6 +1,5 @@
//! FIXME: write short doc here
use hir::{Ty, TypeCtor};
use ra_syntax::{ast::AstNode, TextRange, TextUnit};
use ra_text_edit::TextEdit;
@@ -30,9 +29,12 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
dot_receiver.syntax().text().to_string()
};
let receiver_ty = ctx.analyzer.type_of(ctx.db, &dot_receiver);
let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) {
Some(it) => it,
None => return,
};
if is_bool_or_unknown(receiver_ty) {
if receiver_ty.is_bool() || receiver_ty.is_unknown() {
postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text))
.add_to(acc);
postfix_snippet(
@@ -75,14 +77,6 @@ fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet:
.snippet_edit(edit)
}
fn is_bool_or_unknown(ty: Option<Ty>) -> bool {
match &ty {
Some(Ty::Apply(app)) if app.ctor == TypeCtor::Bool => true,
Some(Ty::Unknown) | None => true,
Some(_) => false,
}
}
#[cfg(test)]
mod tests {
use insta::assert_debug_snapshot;
@@ -1,7 +1,5 @@
//! FIXME: write short doc here
use hir::Substs;
use crate::completion::{CompletionContext, Completions};
/// Complete fields in fields literals.
@@ -15,10 +13,9 @@ pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionCon
Some(it) => it,
_ => return,
};
let substs = &ty.substs().unwrap_or_else(Substs::empty);
for field in variant.fields(ctx.db) {
acc.add_field(ctx, field, substs);
for (field, field_ty) in ty.variant_fields(ctx.db, variant) {
acc.add_field(ctx, field, &field_ty);
}
}
@@ -1,7 +1,5 @@
//! FIXME: write short doc here
use hir::Substs;
use crate::completion::{CompletionContext, Completions};
pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionContext) {
@@ -14,10 +12,9 @@ pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionCon
Some(it) => it,
_ => return,
};
let substs = &ty.substs().unwrap_or_else(Substs::empty);
for field in variant.fields(ctx.db) {
acc.add_field(ctx, field, substs);
for (field, field_ty) in ty.variant_fields(ctx.db, variant) {
acc.add_field(ctx, field, &field_ty);
}
}
@@ -1,12 +1,12 @@
//! This modules takes care of rendering various definitions as completion items.
use hir::{db::HirDatabase, Docs, HasAttrs, HasSource, HirDisplay, ScopeDef, Ty, TypeWalk};
use hir::{db::HirDatabase, Docs, HasAttrs, HasSource, HirDisplay, ScopeDef, Type};
use join_to_string::join;
use ra_syntax::ast::NameOwner;
use test_utils::tested_by;
use crate::completion::{
db, CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions,
CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions,
};
use crate::display::{const_label, function_label, macro_label, type_label};
@@ -16,7 +16,7 @@ impl Completions {
&mut self,
ctx: &CompletionContext,
field: hir::StructField,
substs: &hir::Substs,
ty: &Type,
) {
let is_deprecated = is_deprecated(field, ctx.db);
CompletionItem::new(
@@ -25,13 +25,13 @@ impl Completions {
field.name(ctx.db).to_string(),
)
.kind(CompletionItemKind::Field)
.detail(field.ty(ctx.db).subst(substs).display(ctx.db).to_string())
.detail(ty.display(ctx.db).to_string())
.set_documentation(field.docs(ctx.db))
.set_deprecated(is_deprecated)
.add_to(self);
}
pub(crate) fn add_tuple_field(&mut self, ctx: &CompletionContext, field: usize, ty: &hir::Ty) {
pub(crate) fn add_tuple_field(&mut self, ctx: &CompletionContext, field: usize, ty: &Type) {
CompletionItem::new(CompletionKind::Reference, ctx.source_range(), field.to_string())
.kind(CompletionItemKind::Field)
.detail(ty.display(ctx.db).to_string())
@@ -98,7 +98,7 @@ impl Completions {
CompletionItem::new(completion_kind, ctx.source_range(), local_name.clone());
if let ScopeDef::Local(local) = resolution {
let ty = local.ty(ctx.db);
if ty != Ty::Unknown {
if !ty.is_unknown() {
completion_item = completion_item.detail(ty.display(ctx.db).to_string());
}
};
@@ -108,19 +108,17 @@ impl Completions {
&& !ctx.has_type_args
&& ctx.db.feature_flags.get("completion.insertion.add-call-parenthesis")
{
let generic_def: Option<hir::GenericDef> = match resolution {
ScopeDef::ModuleDef(Adt(it)) => Some((*it).into()),
ScopeDef::ModuleDef(TypeAlias(it)) => Some((*it).into()),
_ => None,
let has_non_default_type_params = match resolution {
ScopeDef::ModuleDef(Adt(it)) => it.has_non_default_type_params(ctx.db),
ScopeDef::ModuleDef(TypeAlias(it)) => it.has_non_default_type_params(ctx.db),
_ => false,
};
if let Some(def) = generic_def {
if has_non_default_type_params(def, ctx.db) {
tested_by!(inserts_angle_brackets_for_generics);
completion_item = completion_item
.lookup_by(local_name.clone())
.label(format!("{}<…>", local_name))
.insert_snippet(format!("{}<$0>", local_name));
}
if has_non_default_type_params {
tested_by!(inserts_angle_brackets_for_generics);
completion_item = completion_item
.lookup_by(local_name.clone())
.label(format!("{}<…>", local_name))
.insert_snippet(format!("{}<$0>", local_name));
}
}
@@ -269,10 +267,7 @@ impl Completions {
pub(crate) fn add_enum_variant(&mut self, ctx: &CompletionContext, variant: hir::EnumVariant) {
let is_deprecated = is_deprecated(variant, ctx.db);
let name = match variant.name(ctx.db) {
Some(it) => it,
None => return,
};
let name = variant.name(ctx.db);
let detail_types = variant.fields(ctx.db).into_iter().map(|field| field.ty(ctx.db));
let detail = join(detail_types.map(|t| t.display(ctx.db).to_string()))
.separator(", ")
@@ -291,11 +286,6 @@ fn is_deprecated(node: impl HasAttrs, db: &impl HirDatabase) -> bool {
node.attrs(db).by_key("deprecated").exists()
}
fn has_non_default_type_params(def: hir::GenericDef, db: &db::RootDatabase) -> bool {
let subst = db.generic_defaults(def);
subst.iter().any(|ty| ty == &Ty::Unknown)
}
#[cfg(test)]
mod tests {
use insta::assert_debug_snapshot;
@@ -65,6 +65,9 @@ impl salsa::Database for RootDatabase {
fn salsa_runtime(&self) -> &salsa::Runtime<RootDatabase> {
&self.runtime
}
fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
&mut self.runtime
}
fn on_propagated_panic(&self) -> ! {
Canceled::throw()
}
@@ -93,12 +93,9 @@ impl FunctionSignature {
_ => (),
};
let parent_name = match variant.parent_enum(db).name(db) {
Some(name) => name.to_string(),
None => "missing".into(),
};
let parent_name = variant.parent_enum(db).name(db).to_string();
let name = format!("{}::{}", parent_name, variant.name(db).unwrap());
let name = format!("{}::{}", parent_name, variant.name(db));
let params = variant
.fields(db)
@@ -19,6 +19,12 @@ impl ShortLabel for ast::StructDef {
}
}
impl ShortLabel for ast::UnionDef {
fn short_label(&self) -> Option<String> {
short_label_from_node(self, "union ")
}
}
impl ShortLabel for ast::EnumDef {
fn short_label(&self) -> Option<String> {
short_label_from_node(self, "enum ")
@@ -269,4 +269,27 @@ fn some_thing() -> u32 {
assert_eq!(res.name, "foo");
assert_snapshot!(res.expansion, @r###"bar!()"###);
}
#[test]
fn macro_expand_with_dollar_crate() {
let res = check_expand_macro(
r#"
//- /lib.rs
#[macro_export]
macro_rules! bar {
() => {0};
}
macro_rules! foo {
() => {$crate::bar!()};
}
fn main() {
let res = fo<|>o!();
}
"#,
);
assert_eq!(res.name, "foo");
assert_snapshot!(res.expansion, @r###"0"###);
}
}
Some files were not shown because too many files have changed in this diff.