Merge branch 'master' into feature/themes

This commit is contained in:
Seivan Heidari 2019-12-23 15:35:31 +01:00
commit b21d9337d9
233 changed files with 14900 additions and 10781 deletions

View file

@ -14,6 +14,7 @@ jobs:
env: env:
RUSTFLAGS: -D warnings RUSTFLAGS: -D warnings
CARGO_INCREMENTAL: 0 CARGO_INCREMENTAL: 0
RUN_SLOW_TESTS: 1
steps: steps:
- name: Checkout repository - name: Checkout repository
@ -46,9 +47,10 @@ jobs:
- name: Prepare build directory for cache - name: Prepare build directory for cache
run: | run: |
find ./target/debug -maxdepth 1 -type f -delete && \ find ./target/debug -maxdepth 1 -type f -delete \
rm -fr ./target/debug/{deps,.fingerprint}/{*ra_*,*heavy_test*,*gen_lsp*,*thread_worker*} && \ && rm -fr ./target/debug/{deps,.fingerprint}/{*ra_*,*heavy_test*,*gen_lsp*,*thread_worker*} \
rm -f ./target/.rustc_info.json && rm -f ./target/.rustc_info.json \
&& rm ./target/.slow_tests_cookie
type-script: type-script:
name: TypeScript name: TypeScript

2
.vscode/launch.json vendored
View file

@ -16,7 +16,7 @@
"env": { "env": {
"__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/ra_lsp_server" "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/ra_lsp_server"
}, },
"outFiles": ["${workspaceFolder}/editors/code/out/**/*.js"], "outFiles": ["${workspaceFolder}/editors/code/bundle/**/*.js"],
"preLaunchTask": "Build All" "preLaunchTask": "Build All"
}, },
{ {

361
Cargo.lock generated
View file

@ -10,7 +10,12 @@ dependencies = [
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.24" version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "anymap"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@ -23,7 +28,7 @@ name = "atty"
version = "0.2.13" version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -39,7 +44,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", "backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -48,8 +53,8 @@ name = "backtrace-sys"
version = "0.1.32" version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -101,18 +106,18 @@ dependencies = [
[[package]] [[package]]
name = "cargo_metadata" name = "cargo_metadata"
version = "0.9.0" version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "cc" name = "cc"
version = "1.0.47" version = "1.0.48"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@ -123,38 +128,37 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "chalk-derive" name = "chalk-derive"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30#095cd38a4f16337913bba487f2055b9ca0179f30" source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5"
dependencies = [ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "chalk-engine" name = "chalk-engine"
version = "0.9.0" version = "0.9.0"
source = "git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30#095cd38a4f16337913bba487f2055b9ca0179f30" source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5"
dependencies = [ dependencies = [
"chalk-macros 0.1.1 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"stacker 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "chalk-ir" name = "chalk-ir"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30#095cd38a4f16337913bba487f2055b9ca0179f30" source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5"
dependencies = [ dependencies = [
"chalk-derive 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-engine 0.9.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-macros 0.1.1 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)", "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "chalk-macros" name = "chalk-macros"
version = "0.1.1" version = "0.1.1"
source = "git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30#095cd38a4f16337913bba487f2055b9ca0179f30" source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5"
dependencies = [ dependencies = [
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -162,40 +166,30 @@ dependencies = [
[[package]] [[package]]
name = "chalk-rust-ir" name = "chalk-rust-ir"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30#095cd38a4f16337913bba487f2055b9ca0179f30" source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5"
dependencies = [ dependencies = [
"chalk-derive 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-engine 0.9.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-macros 0.1.1 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
] ]
[[package]] [[package]]
name = "chalk-solve" name = "chalk-solve"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30#095cd38a4f16337913bba487f2055b9ca0179f30" source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5"
dependencies = [ dependencies = [
"chalk-derive 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-engine 0.9.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-macros 0.1.1 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-rust-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", "ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "chrono"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "clicolors-control" name = "clicolors-control"
version = "1.0.1" version = "1.0.1"
@ -203,7 +197,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -223,7 +217,7 @@ dependencies = [
"clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
@ -323,13 +317,22 @@ name = "encode_unicode"
version = "0.3.6" version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "env_logger"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "filetime" name = "filetime"
version = "0.2.8" version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -339,18 +342,6 @@ name = "fixedbitset"
version = "0.1.9" version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "flexi_logger"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
"glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"yansi 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "fnv" name = "fnv"
version = "1.0.6" version = "1.0.6"
@ -380,7 +371,7 @@ name = "fsevent-sys"
version = "2.0.1" version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -416,15 +407,10 @@ version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "glob"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "globset" name = "globset"
version = "0.4.4" version = "0.4.4"
@ -447,10 +433,18 @@ dependencies = [
[[package]] [[package]]
name = "hermit-abi" name = "hermit-abi"
version = "0.1.3" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "humantime"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -478,7 +472,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -486,7 +480,7 @@ name = "inotify-sys"
version = "0.1.3" version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -497,8 +491,8 @@ dependencies = [
"console 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "console 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)",
"uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -508,7 +502,7 @@ name = "iovec"
version = "0.1.4" version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -530,7 +524,7 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"paste 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "paste 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -539,9 +533,9 @@ name = "jemalloc-sys"
version = "0.3.2" version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
"fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -550,7 +544,7 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -589,7 +583,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.65" version = "0.2.66"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@ -620,18 +614,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "lsp-types" name = "lsp-types"
version = "0.61.0" version = "0.66.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -664,7 +658,7 @@ dependencies = [
"fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", "net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
@ -674,7 +668,7 @@ dependencies = [
[[package]] [[package]]
name = "mio-extras" name = "mio-extras"
version = "2.0.5" version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -700,7 +694,7 @@ version = "0.2.33"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -715,22 +709,13 @@ dependencies = [
"fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"inotify 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "inotify 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
"mio-extras 2.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "mio-extras 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "num-integer"
version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "num-traits" name = "num-traits"
version = "0.2.10" version = "0.2.10"
@ -744,8 +729,8 @@ name = "num_cpus"
version = "1.11.1" version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"hermit-abi 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "hermit-abi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -774,9 +759,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -797,7 +782,7 @@ dependencies = [
"proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -831,7 +816,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -859,14 +844,6 @@ dependencies = [
"regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "psm"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "quick-error" name = "quick-error"
version = "1.2.2" version = "1.2.2"
@ -928,11 +905,13 @@ dependencies = [
name = "ra_cli" name = "ra_cli"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"flexi_logger 0.14.5 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"pico-args 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "pico-args 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_batch 0.1.0", "ra_batch 0.1.0",
"ra_db 0.1.0", "ra_db 0.1.0",
"ra_hir 0.1.0", "ra_hir 0.1.0",
"ra_hir_def 0.1.0",
"ra_hir_ty 0.1.0",
"ra_ide 0.1.0", "ra_ide 0.1.0",
"ra_prof 0.1.0", "ra_prof 0.1.0",
"ra_syntax 0.1.0", "ra_syntax 0.1.0",
@ -963,11 +942,13 @@ dependencies = [
name = "ra_hir" name = "ra_hir"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_db 0.1.0", "ra_db 0.1.0",
"ra_hir_def 0.1.0", "ra_hir_def 0.1.0",
"ra_hir_expand 0.1.0", "ra_hir_expand 0.1.0",
"ra_hir_ty 0.1.0", "ra_hir_ty 0.1.0",
"ra_prof 0.1.0",
"ra_syntax 0.1.0", "ra_syntax 0.1.0",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -976,6 +957,9 @@ dependencies = [
name = "ra_hir_def" name = "ra_hir_def"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anymap 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)",
"drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -995,6 +979,7 @@ dependencies = [
name = "ra_hir_expand" name = "ra_hir_expand"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_arena 0.1.0", "ra_arena 0.1.0",
"ra_db 0.1.0", "ra_db 0.1.0",
@ -1010,9 +995,9 @@ name = "ra_hir_ty"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"chalk-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-rust-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"chalk-solve 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)", "chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)",
"ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", "ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)", "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1031,6 +1016,7 @@ dependencies = [
name = "ra_ide" name = "ra_ide"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"fst 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "fst 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1060,11 +1046,11 @@ name = "ra_lsp_server"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.14.5 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"jod-thread 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "jod-thread 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-server 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-server 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-types 0.61.0 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-types 0.66.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_ide 0.1.0", "ra_ide 0.1.0",
"ra_prof 0.1.0", "ra_prof 0.1.0",
@ -1075,8 +1061,8 @@ dependencies = [
"ra_vfs_glob 0.1.0", "ra_vfs_glob 0.1.0",
"relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
"tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0", "test_utils 0.1.0",
"threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1091,7 +1077,7 @@ dependencies = [
"ra_syntax 0.1.0", "ra_syntax 0.1.0",
"ra_tt 0.1.0", "ra_tt 0.1.0",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0", "test_utils 0.1.0",
] ]
@ -1117,14 +1103,14 @@ dependencies = [
name = "ra_project_model" name = "ra_project_model"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"cargo_metadata 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "cargo_metadata 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_arena 0.1.0", "ra_arena 0.1.0",
"ra_cfg 0.1.0", "ra_cfg 0.1.0",
"ra_db 0.1.0", "ra_db 0.1.0",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1136,9 +1122,10 @@ dependencies = [
"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_parser 0.1.0", "ra_parser 0.1.0",
"ra_text_edit 0.1.0", "ra_text_edit 0.1.0",
"rowan 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "rowan 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", "smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0", "test_utils 0.1.0",
"walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1189,7 +1176,7 @@ version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1207,7 +1194,7 @@ version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", "getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1282,7 +1269,7 @@ name = "rand_jitter"
version = "0.1.4" version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1294,7 +1281,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1396,12 +1383,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "rowan" name = "rowan"
version = "0.7.1" version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1456,7 +1443,7 @@ dependencies = [
"rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa-macros 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", "salsa-macros 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1467,7 +1454,7 @@ dependencies = [
"heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1489,7 +1476,7 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1499,30 +1486,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.103" version = "1.0.104"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"serde_derive 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.103" version = "1.0.104"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.42" version = "1.0.44"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1532,7 +1519,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1542,7 +1529,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1553,7 +1540,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "smallvec" name = "smallvec"
version = "1.0.0" version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@ -1561,19 +1548,7 @@ name = "smol_str"
version = "0.1.15" version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "stacker"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)",
"psm 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1583,7 +1558,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "syn" name = "syn"
version = "1.0.8" version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1597,7 +1572,7 @@ version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
"remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1609,7 +1584,7 @@ name = "termios"
version = "0.3.1" version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1617,7 +1592,7 @@ name = "test_utils"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
"text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1647,16 +1622,6 @@ dependencies = [
"num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "time"
version = "0.1.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "unicase" name = "unicase"
version = "2.6.0" version = "2.6.0"
@ -1678,7 +1643,7 @@ name = "unicode-normalization"
version = "0.1.11" version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1699,7 +1664,7 @@ dependencies = [
"idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1708,7 +1673,7 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1781,12 +1746,12 @@ dependencies = [
name = "xtask" name = "xtask"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", "anyhow 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)",
"pico-args 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "pico-args 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1798,14 +1763,10 @@ dependencies = [
"linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "yansi"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata] [metadata]
"checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d" "checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
"checksum anyhow 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)" = "b412394828b7ca486b362f300b762d8e43dafd6f0d727b63f1cd2ade207c6cef" "checksum anyhow 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)" = "9267dff192e68f3399525901e709a48c1d3982c9c072fa32f2127a0cb0babf14"
"checksum anymap 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "33954243bd79057c2de7338850b85983a44588021f8a5fee574a8888c6de4344"
"checksum arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8" "checksum arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8"
"checksum atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90" "checksum atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90"
"checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" "checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2"
@ -1818,16 +1779,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8d6c2c5b58ab920a4f5aeaaca34b4488074e8cc7596af94e6f8c6ff247c60245" "checksum bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8d6c2c5b58ab920a4f5aeaaca34b4488074e8cc7596af94e6f8c6ff247c60245"
"checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" "checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5"
"checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb" "checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb"
"checksum cargo_metadata 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8d2d1617e838936c0d2323a65cc151e03ae19a7678dd24f72bccf27119b90a5d" "checksum cargo_metadata 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "46e3374c604fb39d1a2f35ed5e4a4e30e60d01fab49446e08f1b3e9a90aef202"
"checksum cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)" = "aa87058dce70a3ff5621797f1506cb837edd02ac4c0ae642b4542dce802908b8" "checksum cc 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)" = "f52a465a666ca3d838ebbf08b241383421412fe7ebb463527bba275526d89f76"
"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" "checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
"checksum chalk-derive 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>" "checksum chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>"
"checksum chalk-engine 0.9.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>" "checksum chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>"
"checksum chalk-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>" "checksum chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>"
"checksum chalk-macros 0.1.1 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>" "checksum chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>"
"checksum chalk-rust-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>" "checksum chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>"
"checksum chalk-solve 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)" = "<none>" "checksum chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>"
"checksum chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "31850b4a4d6bae316f7a09e691c944c28299298837edc0a03f755618c23cbc01"
"checksum clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e" "checksum clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e"
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
"checksum console 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f5d540c2d34ac9dd0deb5f3b5f54c36c79efa78f6b3ad19106a554d07a7b5d9f" "checksum console 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f5d540c2d34ac9dd0deb5f3b5f54c36c79efa78f6b3ad19106a554d07a7b5d9f"
@ -1843,9 +1803,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" "checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3"
"checksum ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8944dc8fa28ce4a38f778bd46bf7d923fe73eed5a439398507246c8e017e6f36" "checksum ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8944dc8fa28ce4a38f778bd46bf7d923fe73eed5a439398507246c8e017e6f36"
"checksum encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" "checksum encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
"checksum env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
"checksum filetime 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1ff6d4dab0aa0c8e6346d46052e93b13a16cf847b54ed357087c35011048cc7d" "checksum filetime 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1ff6d4dab0aa0c8e6346d46052e93b13a16cf847b54ed357087c35011048cc7d"
"checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33"
"checksum flexi_logger 0.14.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a13ea6b8a4debecf47bf3966d56db0e21366bc3a3649ba159e1a9e6fdd36a4f4"
"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
"checksum format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f7aea5a5909a74969507051a3b17adc84737e31a5f910559892aedce026f4d53" "checksum format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f7aea5a5909a74969507051a3b17adc84737e31a5f910559892aedce026f4d53"
"checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674" "checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674"
@ -1856,10 +1816,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
"checksum getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e7db7ca94ed4cd01190ceee0d8a8052f08a247aa1b469a7f68c6a3b71afcf407" "checksum getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e7db7ca94ed4cd01190ceee0d8a8052f08a247aa1b469a7f68c6a3b71afcf407"
"checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
"checksum globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "925aa2cac82d8834e2b2a4415b6f6879757fb5c0928fc445ae76461a12eed8f2" "checksum globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "925aa2cac82d8834e2b2a4415b6f6879757fb5c0928fc445ae76461a12eed8f2"
"checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
"checksum hermit-abi 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "307c3c9f937f38e3534b1d6447ecf090cafcc9744e4a6360e8b037b2cf5af120" "checksum hermit-abi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f629dc602392d3ec14bfc8a09b5e644d7ffd725102b48b81e59f90f2633621d7"
"checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f"
"checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" "checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9"
"checksum indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712d7b3ea5827fcb9d4fda14bf4da5f136f0db2ae9c8f4bd4e2d1c6fde4e6db2" "checksum indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712d7b3ea5827fcb9d4fda14bf4da5f136f0db2ae9c8f4bd4e2d1c6fde4e6db2"
"checksum inotify 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "40b54539f3910d6f84fbf9a643efd6e3aa6e4f001426c0329576128255994718" "checksum inotify 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "40b54539f3910d6f84fbf9a643efd6e3aa6e4f001426c0329576128255994718"
@ -1877,21 +1837,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cc4fd87be4a815fd373e02773983940f0d75fb26fde8c098e9e45f7af03154c0" "checksum lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cc4fd87be4a815fd373e02773983940f0d75fb26fde8c098e9e45f7af03154c0"
"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" "checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
"checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"
"checksum libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)" = "1a31a0627fdf1f6a39ec0dd577e101440b7db22672c0901fe00a9a6fbb5c24e8" "checksum libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)" = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558"
"checksum linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83" "checksum linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83"
"checksum lock_api 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e57b3997725d2b60dbec1297f6c2e2957cc383db1cebd6be812163f969c7d586" "checksum lock_api 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e57b3997725d2b60dbec1297f6c2e2957cc383db1cebd6be812163f969c7d586"
"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7" "checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
"checksum lsp-server 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0ba36405bd742139ab79c246ca5adb7fde2fe1a0f495e2c8e2f607b607dedb12" "checksum lsp-server 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0ba36405bd742139ab79c246ca5adb7fde2fe1a0f495e2c8e2f607b607dedb12"
"checksum lsp-types 0.61.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fa3268fbe8beb2795c2fb327bf44f4f3d24f5fe9ebc18d7e2980afd444d72bcf" "checksum lsp-types 0.66.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49a2dddfe2791cbf4b5eff5a581e45becf47a24b128a62de80e7cc135bf50064"
"checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" "checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
"checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e" "checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
"checksum memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "75189eb85871ea5c2e2c15abbdd541185f63b408415e5051f5cac122d8c774b9" "checksum memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "75189eb85871ea5c2e2c15abbdd541185f63b408415e5051f5cac122d8c774b9"
"checksum mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "302dec22bcf6bae6dfb69c647187f4b4d0fb6f535521f7bc022430ce8e12008f" "checksum mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "302dec22bcf6bae6dfb69c647187f4b4d0fb6f535521f7bc022430ce8e12008f"
"checksum mio-extras 2.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "46e73a04c2fa6250b8d802134d56d554a9ec2922bf977777c805ea5def61ce40" "checksum mio-extras 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19"
"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919" "checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
"checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88" "checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88"
"checksum notify 4.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "199628fc33b21bc767baa057490b00b382ecbae030803a7b36292422d15b778b" "checksum notify 4.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "199628fc33b21bc767baa057490b00b382ecbae030803a7b36292422d15b778b"
"checksum num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09"
"checksum num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c81ffc11c212fa327657cb19dd85eb7419e163b5b076bede2bdb5c974c07e4" "checksum num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c81ffc11c212fa327657cb19dd85eb7419e163b5b076bede2bdb5c974c07e4"
"checksum num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "76dac5ed2a876980778b8b85f75a71b6cbf0db0b1232ee12f826bccb00d09d72" "checksum num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "76dac5ed2a876980778b8b85f75a71b6cbf0db0b1232ee12f826bccb00d09d72"
"checksum once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "891f486f630e5c5a4916c7e16c4b24a53e78c860b646e9f8e005e4f16847bfed" "checksum once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "891f486f630e5c5a4916c7e16c4b24a53e78c860b646e9f8e005e4f16847bfed"
@ -1907,7 +1866,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)" = "ecd45702f76d6d3c75a80564378ae228a85f0b59d2f3ed43c91b4a69eb2ebfc5" "checksum proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)" = "ecd45702f76d6d3c75a80564378ae228a85f0b59d2f3ed43c91b4a69eb2ebfc5"
"checksum proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9c9e470a8dc4aeae2dee2f335e8f533e2d4b347e1434e5671afc49b054592f27" "checksum proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9c9e470a8dc4aeae2dee2f335e8f533e2d4b347e1434e5671afc49b054592f27"
"checksum proptest 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cf147e022eacf0c8a054ab864914a7602618adba841d800a9a9868a5237a529f" "checksum proptest 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cf147e022eacf0c8a054ab864914a7602618adba841d800a9a9868a5237a529f"
"checksum psm 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b14fc68b454f875abc8354c2555e1d56596f74833ddc0f77f87f4871ed6a30e0"
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
"checksum ra_vfs 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bc898f237e4b4498959ae0100c688793a23e77624d44ef710ba70094217f98e0" "checksum ra_vfs 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bc898f237e4b4498959ae0100c688793a23e77624d44ef710ba70094217f98e0"
@ -1935,7 +1893,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bedde000f40f2921ce439ea165c9c53fd629bfa115140c72e22aceacb4a21954" "checksum relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bedde000f40f2921ce439ea165c9c53fd629bfa115140c72e22aceacb4a21954"
"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
"checksum ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ece421e0c4129b90e4a35b6f625e472e96c552136f5093a2f4fa2bbb75a62d5" "checksum ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ece421e0c4129b90e4a35b6f625e472e96c552136f5093a2f4fa2bbb75a62d5"
"checksum rowan 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ca620bbf9c48c92b5cef19f96354a309ac36b7d8ef7c591e66117335c8b1988b" "checksum rowan 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3eb10a10a48f0f809a217bcf074b85a03dcf79831bae80e7f1a043d0897463e2"
"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" "checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
"checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8" "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
"checksum rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c86aae0c77166108c01305ee1a36a1e77289d7dc6ca0a3cd91ff4992de2d16a5" "checksum rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c86aae0c77166108c01305ee1a36a1e77289d7dc6ca0a3cd91ff4992de2d16a5"
@ -1947,24 +1905,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d" "checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
"checksum serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "1217f97ab8e8904b57dd22eb61cde455fa7446a9c1cf43966066da047c1f3702" "checksum serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "414115f25f818d7dfccec8ee535d76949ae78584fc4f79a6f45a904bf8ab4449"
"checksum serde_derive 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "a8c6faef9a2e64b0064f48570289b4bf8823b7581f1d6157c1b52152306651d0" "checksum serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "128f9e303a5a29922045a830221b8f78ec74a5f544944f3d5984f8ec3895ef64"
"checksum serde_json 1.0.42 (registry+https://github.com/rust-lang/crates.io-index)" = "1a3351dcbc1f067e2c92ab7c3c1f288ad1a4cffc470b5aaddb4c2e0a3ae80043" "checksum serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)" = "48c575e0cc52bdd09b47f330f646cf59afc586e9c4e3ccd6fc1f625b8ea1dad7"
"checksum serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "cd02c7587ec314570041b2754829f84d873ced14a96d1fd1823531e11db40573" "checksum serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "cd02c7587ec314570041b2754829f84d873ced14a96d1fd1823531e11db40573"
"checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35" "checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35"
"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
"checksum smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ecf3b85f68e8abaa7555aa5abdb1153079387e60b718283d732f03897fcfc86" "checksum smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "44e59e0c9fa00817912ae6e4e6e3c4fe04455e75699d06eedc7d85917ed8e8f4"
"checksum smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34836c9a295c62c2ce3514471117c5cb269891e8421b2aafdd910050576c4d8b" "checksum smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34836c9a295c62c2ce3514471117c5cb269891e8421b2aafdd910050576c4d8b"
"checksum stacker 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "d96fc4f13a0ac088e9a3cd9af1cc8c5cc1ab5deb2145cef661267dfc9c542f8a"
"checksum superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f" "checksum superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f"
"checksum syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "661641ea2aa15845cddeb97dad000d22070bb5c1fb456b96c1cba883ec691e92" "checksum syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "dff0acdb207ae2fe6d5976617f887eb1e35a2ba52c13c7234c790960cdad9238"
"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" "checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
"checksum termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625" "checksum termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625"
"checksum text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579" "checksum text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579"
"checksum thin-dst 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c52fd98a9e4913c466d83381a59245691875d2f3e04611fca57f964bd8aa96e1" "checksum thin-dst 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c52fd98a9e4913c466d83381a59245691875d2f3e04611fca57f964bd8aa96e1"
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" "checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
"checksum threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865" "checksum threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865"
"checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f"
"checksum unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" "checksum unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
"checksum unicode-normalization 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b561e267b2326bb4cebfc0ef9e68355c7abe6c6f522aeac2f5bf95d56c59bdcf" "checksum unicode-normalization 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b561e267b2326bb4cebfc0ef9e68355c7abe6c6f522aeac2f5bf95d56c59bdcf"
@ -1983,4 +1939,3 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" "checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
"checksum yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65923dd1784f44da1d2c3dbbc5e822045628c590ba72123e1c73d3c230c4434d" "checksum yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65923dd1784f44da1d2c3dbbc5e822045628c590ba72123e1c73d3c230c4434d"
"checksum yansi 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9fc79f4a1e39857fc00c3f662cbf2651c771f00e9c15fe2abc341806bd46bd71"

View file

@ -11,3 +11,4 @@ incremental = true
debug = 0 # set this to 1 or 2 to get more useful backtraces in debugger debug = 0 # set this to 1 or 2 to get more useful backtraces in debugger
[patch.'crates-io'] [patch.'crates-io']
# rowan = { path = "../rowan" }

View file

@ -37,7 +37,8 @@ $ cargo xtask install
$ cargo xtask install --server $ cargo xtask install --server
``` ```
For non-standard setup of VS Code and other editors, see [./docs/user](./docs/user). For non-standard setup of VS Code and other editors, or if the language server
cannot start, see [./docs/user](./docs/user).
## Documentation ## Documentation
@ -57,7 +58,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frls-2.2E0
## Quick Links ## Quick Links
* API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide/ * API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide/
* Website: https://rust-analyzer.github.io/
## License ## License

View file

@ -1,5 +1,5 @@
//! This module defines `AssistCtx` -- the API surface that is exposed to assists. //! This module defines `AssistCtx` -- the API surface that is exposed to assists.
use hir::{db::HirDatabase, SourceAnalyzer}; use hir::{db::HirDatabase, InFile, SourceAnalyzer};
use ra_db::FileRange; use ra_db::FileRange;
use ra_fmt::{leading_indent, reindent}; use ra_fmt::{leading_indent, reindent};
use ra_syntax::{ use ra_syntax::{
@ -117,7 +117,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
node: &SyntaxNode, node: &SyntaxNode,
offset: Option<TextUnit>, offset: Option<TextUnit>,
) -> SourceAnalyzer { ) -> SourceAnalyzer {
SourceAnalyzer::new(self.db, hir::Source::new(self.frange.file_id.into(), node), offset) SourceAnalyzer::new(self.db, InFile::new(self.frange.file_id.into(), node), offset)
} }
pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement { pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {

View file

@ -0,0 +1,206 @@
//! FIXME: write short doc here
use crate::{Assist, AssistCtx, AssistId};
use hir::db::HirDatabase;
use join_to_string::join;
use ra_syntax::{
ast::{self, AstNode},
Direction, SmolStr,
SyntaxKind::{IDENT, WHITESPACE},
TextRange, TextUnit,
};
// Name of the attribute this assist operates on: `#[derive(...)]`.
// `&'static` is redundant on a `const` (consts are implicitly 'static).
const DERIVE_TRAIT: &str = "derive";
// Assist: add_custom_impl
//
// Adds impl block for derived trait.
//
// ```
// #[derive(Deb<|>ug, Display)]
// struct S;
// ```
// ->
// ```
// #[derive(Display)]
// struct S;
//
// impl Debug for S {
//
// }
// ```
pub(crate) fn add_custom_impl(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
    // Cursor must be inside the argument list of an attribute, e.g. `(Debug)`.
    let input = ctx.find_node_at_offset::<ast::AttrInput>()?;
    let attr = input.syntax().parent().and_then(ast::Attr::cast)?;

    // Bail out unless the attribute is `derive`; keep the ident's text so we
    // can distinguish it from the trait idents below.
    let attr_name = attr
        .syntax()
        .descendants_with_tokens()
        .filter(|t| t.kind() == IDENT)
        .find_map(|i| i.into_token())
        .filter(|t| *t.text() == DERIVE_TRAIT)?
        .text()
        .clone();

    // The trait under the cursor: any IDENT at the offset that is not the
    // `derive` keyword itself. (`find` replaces the `filter(..).next()` idiom.)
    let trait_token =
        ctx.token_at_offset().find(|t| t.kind() == IDENT && *t.text() != attr_name)?;

    // The name of the annotated item; the generated impl is inserted after the
    // end of the item node (the name's parent).
    let annotated = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?;
    let annotated_name = annotated.syntax().text().to_string();
    let start_offset = annotated.syntax().parent()?.text_range().end();

    ctx.add_assist(AssistId("add_custom_impl"), "add custom impl", |edit| {
        edit.target(attr.syntax().text_range());

        // Remaining derive arguments, i.e. every trait except the one the
        // cursor is on.
        let new_attr_input = input
            .syntax()
            .descendants_with_tokens()
            .filter(|t| t.kind() == IDENT)
            .filter_map(|t| t.into_token().map(|t| t.text().clone()))
            .filter(|t| t != trait_token.text())
            .collect::<Vec<SmolStr>>();
        let has_more_derives = !new_attr_input.is_empty();
        let new_attr_input =
            join(new_attr_input.iter()).separator(", ").surround_with("(", ")").to_string();
        let new_attr_input_len = new_attr_input.len();

        let mut buf = String::new();
        buf.push_str("\n\nimpl ");
        buf.push_str(trait_token.text().as_str());
        buf.push_str(" for ");
        buf.push_str(annotated_name.as_str());
        buf.push_str(" {\n");

        // Either shrink the derive list or delete the whole attribute (plus a
        // trailing line break, if any). `cursor_delta` tracks how much text is
        // removed before `start_offset`, so the cursor lands inside the impl.
        let cursor_delta = if has_more_derives {
            edit.replace(input.syntax().text_range(), new_attr_input);
            input.syntax().text_range().len() - TextUnit::from_usize(new_attr_input_len)
        } else {
            let attr_range = attr.syntax().text_range();
            edit.delete(attr_range);

            let line_break_range = attr
                .syntax()
                .next_sibling_or_token()
                .filter(|t| t.kind() == WHITESPACE)
                .map(|t| t.text_range())
                // Lazily build the empty fallback range instead of eagerly
                // constructing it with `unwrap_or`.
                .unwrap_or_else(|| TextRange::from_to(TextUnit::from(0), TextUnit::from(0)));
            edit.delete(line_break_range);

            attr_range.len() + line_break_range.len()
        };

        edit.set_cursor(start_offset + TextUnit::of_str(&buf) - cursor_delta);
        buf.push_str("\n}");
        edit.insert(start_offset, buf);
    })
}
// Tests for the `add_custom_impl` assist. Fixture strings use `<|>` to mark
// the cursor position; their exact bytes (including blank lines) are compared
// verbatim by `check_assist`, so they must not be reformatted.
#[cfg(test)]
mod tests {
use super::*;
use crate::helpers::{check_assist, check_assist_not_applicable};
// Deriving a single trait: the whole `#[derive(..)]` attribute is removed
// and an empty impl block is appended after the struct.
#[test]
fn add_custom_impl_for_unique_input() {
check_assist(
add_custom_impl,
"
#[derive(Debu<|>g)]
struct Foo {
bar: String,
}
",
"
struct Foo {
bar: String,
}
impl Debug for Foo {
<|>
}
",
)
}
// Same as above, but the annotated item has a visibility modifier, which
// must survive the edit.
#[test]
fn add_custom_impl_for_with_visibility_modifier() {
check_assist(
add_custom_impl,
"
#[derive(Debug<|>)]
pub struct Foo {
bar: String,
}
",
"
pub struct Foo {
bar: String,
}
impl Debug for Foo {
<|>
}
",
)
}
// With multiple derive arguments, only the trait under the cursor is
// removed from the list; the attribute itself stays.
#[test]
fn add_custom_impl_when_multiple_inputs() {
check_assist(
add_custom_impl,
"
#[derive(Display, Debug<|>, Serialize)]
struct Foo {}
",
"
#[derive(Display, Serialize)]
struct Foo {}
impl Debug for Foo {
<|>
}
",
)
}
// `#[derive()]` with no arguments: nothing to implement, assist must not
// be offered.
#[test]
fn test_ignore_derive_macro_without_input() {
check_assist_not_applicable(
add_custom_impl,
"
#[derive(<|>)]
struct Foo {}
",
)
}
// Cursor on the attribute punctuation (not on a trait ident) must not
// trigger the assist.
#[test]
fn test_ignore_if_cursor_on_param() {
check_assist_not_applicable(
add_custom_impl,
"
#[derive<|>(Debug)]
struct Foo {}
",
);
check_assist_not_applicable(
add_custom_impl,
"
#[derive(Debug)<|>]
struct Foo {}
",
)
}
// Attributes other than `derive` (here `allow`) must not trigger the
// assist.
#[test]
fn test_ignore_if_not_derive() {
check_assist_not_applicable(
add_custom_impl,
"
#[allow(non_camel_<|>case_types)]
struct Foo {}
",
)
}
}

View file

@ -578,17 +578,21 @@ fn apply_auto_import(
fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> { fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> {
let mut ps = Vec::<SmolStr>::with_capacity(10); let mut ps = Vec::<SmolStr>::with_capacity(10);
match path.kind { match path.kind() {
hir::PathKind::Abs => ps.push("".into()), hir::PathKind::Abs => ps.push("".into()),
hir::PathKind::Crate => ps.push("crate".into()), hir::PathKind::Crate => ps.push("crate".into()),
hir::PathKind::Plain => {} hir::PathKind::Plain => {}
hir::PathKind::Self_ => ps.push("self".into()), hir::PathKind::Super(0) => ps.push("self".into()),
hir::PathKind::Super => ps.push("super".into()), hir::PathKind::Super(lvl) => {
hir::PathKind::Type(_) | hir::PathKind::DollarCrate(_) => return None, let mut chain = "super".to_string();
} for _ in 0..*lvl {
for s in path.segments.iter() { chain += "::super";
ps.push(s.name.to_string().into()); }
ps.push(chain.into());
}
hir::PathKind::DollarCrate(_) => return None,
} }
ps.extend(path.segments().iter().map(|it| it.name.to_string().into()));
Some(ps) Some(ps)
} }

View file

@ -1,5 +1,5 @@
use format_buf::format; use format_buf::format;
use hir::{db::HirDatabase, FromSource}; use hir::{db::HirDatabase, FromSource, InFile};
use join_to_string::join; use join_to_string::join;
use ra_syntax::{ use ra_syntax::{
ast::{ ast::{
@ -56,42 +56,39 @@ pub(crate) fn add_new(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let vis = vis.as_ref().map(String::as_str).unwrap_or(""); let vis = vis.as_ref().map(String::as_str).unwrap_or("");
write!(&mut buf, " {}fn new(", vis).unwrap(); write!(&mut buf, " {}fn new(", vis).unwrap();
join(field_list.fields().map(|f| { join(field_list.fields().filter_map(|f| {
format!( Some(format!("{}: {}", f.name()?.syntax().text(), f.ascribed_type()?.syntax().text()))
"{}: {}",
f.name().unwrap().syntax().text(),
f.ascribed_type().unwrap().syntax().text()
)
})) }))
.separator(", ") .separator(", ")
.to_buf(&mut buf); .to_buf(&mut buf);
buf.push_str(") -> Self { Self {"); buf.push_str(") -> Self { Self {");
join(field_list.fields().map(|f| f.name().unwrap().syntax().text())) join(field_list.fields().filter_map(|f| Some(f.name()?.syntax().text())))
.separator(", ") .separator(", ")
.surround_with(" ", " ") .surround_with(" ", " ")
.to_buf(&mut buf); .to_buf(&mut buf);
buf.push_str("} }"); buf.push_str("} }");
let (start_offset, end_offset) = if let Some(impl_block) = impl_block { let (start_offset, end_offset) = impl_block
buf.push('\n'); .and_then(|impl_block| {
let start = impl_block buf.push('\n');
.syntax() let start = impl_block
.descendants_with_tokens() .syntax()
.find(|t| t.kind() == T!['{']) .descendants_with_tokens()
.unwrap() .find(|t| t.kind() == T!['{'])?
.text_range() .text_range()
.end(); .end();
(start, TextUnit::from_usize(1)) Some((start, TextUnit::from_usize(1)))
} else { })
buf = generate_impl_text(&strukt, &buf); .unwrap_or_else(|| {
let start = strukt.syntax().text_range().end(); buf = generate_impl_text(&strukt, &buf);
let start = strukt.syntax().text_range().end();
(start, TextUnit::from_usize(3)) (start, TextUnit::from_usize(3))
}; });
edit.set_cursor(start_offset + TextUnit::of_str(&buf) - end_offset); edit.set_cursor(start_offset + TextUnit::of_str(&buf) - end_offset);
edit.insert(start_offset, buf); edit.insert(start_offset, buf);
@ -141,44 +138,41 @@ fn find_struct_impl(
})?; })?;
let struct_ty = { let struct_ty = {
let src = hir::Source { file_id: ctx.frange.file_id.into(), value: strukt.clone() }; let src = InFile { file_id: ctx.frange.file_id.into(), value: strukt.clone() };
hir::Struct::from_source(db, src).unwrap().ty(db) hir::Struct::from_source(db, src)?.ty(db)
}; };
let mut found_new_fn = false; let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| {
let src = InFile { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() };
let block = module.descendants().filter_map(ast::ImplBlock::cast).find(|impl_blk| { let blk = hir::ImplBlock::from_source(db, src)?;
if found_new_fn {
return false;
}
let src = hir::Source { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() };
let blk = hir::ImplBlock::from_source(db, src).unwrap();
let same_ty = blk.target_ty(db) == struct_ty; let same_ty = blk.target_ty(db) == struct_ty;
let not_trait_impl = blk.target_trait(db).is_none(); let not_trait_impl = blk.target_trait(db).is_none();
if !(same_ty && not_trait_impl) { if !(same_ty && not_trait_impl) {
return false; None
} else {
Some(impl_blk)
} }
found_new_fn = has_new_fn(impl_blk);
true
}); });
if found_new_fn { if let Some(ref impl_blk) = block {
None if has_new_fn(impl_blk) {
} else { return None;
Some(block) }
} }
Some(block)
} }
fn has_new_fn(imp: &ast::ImplBlock) -> bool { fn has_new_fn(imp: &ast::ImplBlock) -> bool {
if let Some(il) = imp.item_list() { if let Some(il) = imp.item_list() {
for item in il.impl_items() { for item in il.impl_items() {
if let ast::ImplItem::FnDef(f) = item { if let ast::ImplItem::FnDef(f) = item {
if f.name().unwrap().text().eq_ignore_ascii_case("new") { if let Some(name) = f.name() {
return true; if name.text().eq_ignore_ascii_case("new") {
return true;
}
} }
} }
} }

View file

@ -83,8 +83,8 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Opt
let parent_container = parent_block.syntax().parent()?.parent()?; let parent_container = parent_block.syntax().parent()?.parent()?;
let early_expression: ast::Expr = match parent_container.kind() { let early_expression: ast::Expr = match parent_container.kind() {
WHILE_EXPR | LOOP_EXPR => make::expr_continue().into(), WHILE_EXPR | LOOP_EXPR => make::expr_continue(),
FN_DEF => make::expr_return().into(), FN_DEF => make::expr_return(),
_ => return None, _ => return None,
}; };
@ -116,13 +116,13 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Opt
) )
.into(), .into(),
), ),
make::expr_path(make::path_from_name_ref(make::name_ref("it"))).into(), make::expr_path(make::path_from_name_ref(make::name_ref("it"))),
); );
let sad_arm = make::match_arm( let sad_arm = make::match_arm(
// FIXME: would be cool to use `None` or `Err(_)` if appropriate // FIXME: would be cool to use `None` or `Err(_)` if appropriate
once(make::placeholder_pat().into()), once(make::placeholder_pat().into()),
early_expression.into(), early_expression,
); );
make::expr_match(cond_expr, make::match_arm_list(vec![happy_arm, sad_arm])) make::expr_match(cond_expr, make::match_arm_list(vec![happy_arm, sad_arm]))
@ -130,7 +130,7 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Opt
let let_stmt = make::let_stmt( let let_stmt = make::let_stmt(
make::bind_pat(make::name(&bound_ident.syntax().to_string())).into(), make::bind_pat(make::name(&bound_ident.syntax().to_string())).into(),
Some(match_expr.into()), Some(match_expr),
); );
let let_stmt = if_indent_level.increase_indent(let_stmt); let let_stmt = if_indent_level.increase_indent(let_stmt);
replace(let_stmt.syntax(), &then_block, &parent_block, &if_expr) replace(let_stmt.syntax(), &then_block, &parent_block, &if_expr)

View file

@ -2,6 +2,25 @@
use super::check; use super::check;
#[test]
fn doctest_add_custom_impl() {
check(
"add_custom_impl",
r#####"
#[derive(Deb<|>ug, Display)]
struct S;
"#####,
r#####"
#[derive(Display)]
struct S;
impl Debug for S {
}
"#####,
)
}
#[test] #[test]
fn doctest_add_derive() { fn doctest_add_derive() {
check( check(

View file

@ -95,6 +95,7 @@ mod assists {
mod add_derive; mod add_derive;
mod add_explicit_type; mod add_explicit_type;
mod add_impl; mod add_impl;
mod add_custom_impl;
mod add_new; mod add_new;
mod apply_demorgan; mod apply_demorgan;
mod invert_if; mod invert_if;
@ -121,6 +122,7 @@ mod assists {
add_derive::add_derive, add_derive::add_derive,
add_explicit_type::add_explicit_type, add_explicit_type::add_explicit_type,
add_impl::add_impl, add_impl::add_impl,
add_custom_impl::add_custom_impl,
add_new::add_new, add_new::add_new,
apply_demorgan::apply_demorgan, apply_demorgan::apply_demorgan,
invert_if::invert_if, invert_if::invert_if,

View file

@ -43,5 +43,3 @@ impl FileLoader for TestDB {
FileLoaderDelegate(self).relevant_crates(file_id) FileLoaderDelegate(self).relevant_crates(file_id)
} }
} }
impl hir::debug::HirDebugHelper for TestDB {}

View file

@ -22,7 +22,7 @@ fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId {
pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, FxHashMap<SourceRootId, PackageRoot>)> { pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, FxHashMap<SourceRootId, PackageRoot>)> {
let root = std::env::current_dir()?.join(root); let root = std::env::current_dir()?.join(root);
let ws = ProjectWorkspace::discover(root.as_ref())?; let ws = ProjectWorkspace::discover(root.as_ref(), &Default::default())?;
let project_roots = ws.to_roots(); let project_roots = ws.to_roots();
let (sender, receiver) = unbounded(); let (sender, receiver) = unbounded();
let sender = Box::new(move |t| sender.send(t).unwrap()); let sender = Box::new(move |t| sender.send(t).unwrap());

View file

@ -7,12 +7,14 @@ publish = false
[dependencies] [dependencies]
pico-args = "0.3.0" pico-args = "0.3.0"
flexi_logger = "0.14.0" env_logger = { version = "0.7.1", default-features = false, features = ["humantime"] }
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
ra_ide = { path = "../ra_ide" } ra_ide = { path = "../ra_ide" }
ra_batch = { path = "../ra_batch" } ra_batch = { path = "../ra_batch" }
ra_hir = { path = "../ra_hir" } hir = { path = "../ra_hir", package = "ra_hir" }
hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" }
hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
ra_db = { path = "../ra_db" } ra_db = { path = "../ra_db" }
[dependencies.ra_prof] [dependencies.ra_prof]

View file

@ -2,8 +2,13 @@
use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; use std::{collections::HashSet, fmt::Write, path::Path, time::Instant};
use hir::{
db::{DefDatabase, HirDatabase},
AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
};
use hir_def::FunctionId;
use hir_ty::{Ty, TypeWalk};
use ra_db::SourceDatabaseExt; use ra_db::SourceDatabaseExt;
use ra_hir::{AssocItem, Crate, HasSource, HirDisplay, ModuleDef, Ty, TypeWalk};
use ra_syntax::AstNode; use ra_syntax::AstNode;
use crate::{progress_report::ProgressReport, Result, Verbosity}; use crate::{progress_report::ProgressReport, Result, Verbosity};
@ -101,8 +106,9 @@ pub fn run(
continue; continue;
} }
} }
let body = f.body(db); let f_id = FunctionId::from(f);
let inference_result = f.infer(db); let body = db.body(f_id.into());
let inference_result = db.infer(f_id.into());
for (expr_id, _) in body.exprs.iter() { for (expr_id, _) in body.exprs.iter() {
let ty = &inference_result[expr_id]; let ty = &inference_result[expr_id];
num_exprs += 1; num_exprs += 1;
@ -122,7 +128,8 @@ pub fn run(
if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
num_type_mismatches += 1; num_type_mismatches += 1;
if verbosity.is_verbose() { if verbosity.is_verbose() {
let src = f.body_source_map(db).expr_syntax(expr_id); let (_, sm) = db.body_with_source_map(f_id.into());
let src = sm.expr_syntax(expr_id);
if let Some(src) = src { if let Some(src) = src {
// FIXME: it might be nice to have a function (on Analysis?) that goes from Source<T> -> (LineCol, LineCol) directly // FIXME: it might be nice to have a function (on Analysis?) that goes from Source<T> -> (LineCol, LineCol) directly
let original_file = src.file_id.original_file(db); let original_file = src.file_id.original_file(db);

View file

@ -7,7 +7,6 @@ mod progress_report;
use std::{error::Error, fmt::Write, io::Read}; use std::{error::Error, fmt::Write, io::Read};
use flexi_logger::Logger;
use pico_args::Arguments; use pico_args::Arguments;
use ra_ide::{file_structure, Analysis}; use ra_ide::{file_structure, Analysis};
use ra_prof::profile; use ra_prof::profile;
@ -32,7 +31,7 @@ impl Verbosity {
} }
fn main() -> Result<()> { fn main() -> Result<()> {
Logger::with_env_or_str("error").start()?; env_logger::try_init()?;
let subcommand = match std::env::args_os().nth(1) { let subcommand = match std::env::args_os().nth(1) {
None => { None => {

View file

@ -235,6 +235,15 @@ impl FromStr for Edition {
} }
} }
impl fmt::Display for Edition {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
Edition::Edition2015 => "2015",
Edition::Edition2018 => "2018",
})
}
}
impl Dependency { impl Dependency {
pub fn crate_id(&self) -> CrateId { pub fn crate_id(&self) -> CrateId {
self.crate_id self.crate_id

View file

@ -10,9 +10,11 @@ doctest = false
[dependencies] [dependencies]
log = "0.4.5" log = "0.4.5"
rustc-hash = "1.0" rustc-hash = "1.0"
either = "1.5"
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
ra_db = { path = "../ra_db" } ra_db = { path = "../ra_db" }
ra_prof = { path = "../ra_prof" }
hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" } hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
hir_def = { path = "../ra_hir_def", package = "ra_hir_def" } hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" } hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" }

View file

@ -1,36 +1,35 @@
//! FIXME: write short doc here //! FIXME: write short doc here
pub(crate) mod src;
use std::sync::Arc; use std::sync::Arc;
use either::Either;
use hir_def::{ use hir_def::{
adt::VariantData, adt::VariantData,
body::{Body, BodySourceMap},
builtin_type::BuiltinType, builtin_type::BuiltinType,
docs::Documentation, docs::Documentation,
expr::{BindingAnnotation, Pat, PatId}, expr::{BindingAnnotation, Pat, PatId},
nameres::ModuleSource,
per_ns::PerNs, per_ns::PerNs,
resolver::HasResolver, resolver::HasResolver,
type_ref::{Mutability, TypeRef}, type_ref::{Mutability, TypeRef},
AdtId, AstItemDef, ConstId, ContainerId, DefWithBodyId, EnumId, FunctionId, GenericDefId, AdtId, ConstId, DefWithBodyId, EnumId, FunctionId, HasModule, ImplId, LocalEnumVariantId,
HasModule, ImplId, LocalEnumVariantId, LocalImportId, LocalModuleId, LocalStructFieldId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId,
Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId, TypeParamId, UnionId,
}; };
use hir_expand::{ use hir_expand::{
diagnostics::DiagnosticSink, diagnostics::DiagnosticSink,
name::{self, AsName}, name::{name, AsName},
AstId, MacroDefId, MacroDefId,
}; };
use hir_ty::expr::ExprValidator; use hir_ty::{
use ra_db::{CrateId, Edition, FileId, FilePosition}; autoderef, display::HirFormatter, expr::ExprValidator, ApplicationTy, Canonical, InEnvironment,
use ra_syntax::{ast, AstNode, SyntaxNode}; TraitEnvironment, Ty, TyDefId, TypeCtor, TypeWalk,
};
use ra_db::{CrateId, Edition, FileId};
use ra_syntax::ast;
use crate::{ use crate::{
db::{DefDatabase, HirDatabase}, db::{DefDatabase, HirDatabase},
ty::display::HirFormatter, CallableDef, HirDisplay, InFile, Name,
ty::{self, InEnvironment, InferenceResult, TraitEnvironment, Ty, TyDefId, TypeCtor, TypeWalk},
CallableDef, Either, HirDisplay, Name, Source,
}; };
/// hir::Crate describes a single crate. It's the main interface with which /// hir::Crate describes a single crate. It's the main interface with which
@ -38,7 +37,7 @@ use crate::{
/// root module. /// root module.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Crate { pub struct Crate {
pub(crate) crate_id: CrateId, pub(crate) id: CrateId,
} }
#[derive(Debug)] #[derive(Debug)]
@ -48,91 +47,43 @@ pub struct CrateDependency {
} }
impl Crate { impl Crate {
pub fn crate_id(self) -> CrateId {
self.crate_id
}
pub fn dependencies(self, db: &impl DefDatabase) -> Vec<CrateDependency> { pub fn dependencies(self, db: &impl DefDatabase) -> Vec<CrateDependency> {
db.crate_graph() db.crate_graph()
.dependencies(self.crate_id) .dependencies(self.id)
.map(|dep| { .map(|dep| {
let krate = Crate { crate_id: dep.crate_id() }; let krate = Crate { id: dep.crate_id() };
let name = dep.as_name(); let name = dep.as_name();
CrateDependency { krate, name } CrateDependency { krate, name }
}) })
.collect() .collect()
} }
// FIXME: add `transitive_reverse_dependencies`.
pub fn reverse_dependencies(self, db: &impl DefDatabase) -> Vec<Crate> {
let crate_graph = db.crate_graph();
crate_graph
.iter()
.filter(|&krate| crate_graph.dependencies(krate).any(|it| it.crate_id == self.id))
.map(|id| Crate { id })
.collect()
}
pub fn root_module(self, db: &impl DefDatabase) -> Option<Module> { pub fn root_module(self, db: &impl DefDatabase) -> Option<Module> {
let module_id = db.crate_def_map(self.crate_id).root; let module_id = db.crate_def_map(self.id).root;
Some(Module::new(self, module_id)) Some(Module::new(self, module_id))
} }
pub fn root_file(self, db: &impl DefDatabase) -> FileId {
db.crate_graph().crate_root(self.id)
}
pub fn edition(self, db: &impl DefDatabase) -> Edition { pub fn edition(self, db: &impl DefDatabase) -> Edition {
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
crate_graph.edition(self.crate_id) crate_graph.edition(self.id)
} }
pub fn all(db: &impl DefDatabase) -> Vec<Crate> { pub fn all(db: &impl DefDatabase) -> Vec<Crate> {
db.crate_graph().iter().map(|crate_id| Crate { crate_id }).collect() db.crate_graph().iter().map(|id| Crate { id }).collect()
}
}
pub enum ModuleSource {
SourceFile(ast::SourceFile),
Module(ast::Module),
}
impl ModuleSource {
pub fn new(
db: &impl DefDatabase,
file_id: Option<FileId>,
decl_id: Option<AstId<ast::Module>>,
) -> ModuleSource {
match (file_id, decl_id) {
(Some(file_id), _) => {
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
(None, Some(item_id)) => {
let module = item_id.to_node(db);
assert!(module.item_list().is_some(), "expected inline module");
ModuleSource::Module(module)
}
(None, None) => panic!(),
}
}
// FIXME: this methods do not belong here
pub fn from_position(db: &impl DefDatabase, position: FilePosition) -> ModuleSource {
let parse = db.parse(position.file_id);
match &ra_syntax::algo::find_node_at_offset::<ast::Module>(
parse.tree().syntax(),
position.offset,
) {
Some(m) if !m.has_semi() => ModuleSource::Module(m.clone()),
_ => {
let source_file = parse.tree();
ModuleSource::SourceFile(source_file)
}
}
}
pub fn from_child_node(db: &impl DefDatabase, child: Source<&SyntaxNode>) -> ModuleSource {
if let Some(m) =
child.value.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi())
{
ModuleSource::Module(m)
} else {
let file_id = child.file_id.original_file(db);
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
}
pub fn from_file_id(db: &impl DefDatabase, file_id: FileId) -> ModuleSource {
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
} }
} }
@ -171,7 +122,7 @@ pub use hir_def::attr::Attrs;
impl Module { impl Module {
pub(crate) fn new(krate: Crate, crate_module_id: LocalModuleId) -> Module { pub(crate) fn new(krate: Crate, crate_module_id: LocalModuleId) -> Module {
Module { id: ModuleId { krate: krate.crate_id, local_id: crate_module_id } } Module { id: ModuleId { krate: krate.id, local_id: crate_module_id } }
} }
/// Name of this module. /// Name of this module.
@ -189,7 +140,7 @@ impl Module {
/// Returns the crate this module is part of. /// Returns the crate this module is part of.
pub fn krate(self) -> Crate { pub fn krate(self) -> Crate {
Crate { crate_id: self.id.krate } Crate { id: self.id.krate }
} }
/// Topmost parent of this module. Every module has a `crate_root`, but some /// Topmost parent of this module. Every module has a `crate_root`, but some
@ -200,13 +151,6 @@ impl Module {
self.with_module_id(def_map.root) self.with_module_id(def_map.root)
} }
/// Finds a child module with the specified name.
pub fn child(self, db: &impl DefDatabase, name: &Name) -> Option<Module> {
let def_map = db.crate_def_map(self.id.krate);
let child_id = def_map[self.id.local_id].children.get(name)?;
Some(self.with_module_id(*child_id))
}
/// Iterates over all child modules. /// Iterates over all child modules.
pub fn children(self, db: &impl DefDatabase) -> impl Iterator<Item = Module> { pub fn children(self, db: &impl DefDatabase) -> impl Iterator<Item = Module> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
@ -236,13 +180,11 @@ impl Module {
} }
/// Returns a `ModuleScope`: a set of items, visible in this module. /// Returns a `ModuleScope`: a set of items, visible in this module.
pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef, Option<Import>)> { pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef)> {
db.crate_def_map(self.id.krate)[self.id.local_id] db.crate_def_map(self.id.krate)[self.id.local_id]
.scope .scope
.entries() .entries()
.map(|(name, res)| { .map(|(name, def)| (name.clone(), def.into()))
(name.clone(), res.def.into(), res.import.map(|id| Import { parent: self, id }))
})
.collect() .collect()
} }
@ -277,19 +219,14 @@ impl Module {
pub fn impl_blocks(self, db: &impl DefDatabase) -> Vec<ImplBlock> { pub fn impl_blocks(self, db: &impl DefDatabase) -> Vec<ImplBlock> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
def_map[self.id.local_id].impls.iter().copied().map(ImplBlock::from).collect() def_map[self.id.local_id].scope.impls().map(ImplBlock::from).collect()
} }
fn with_module_id(self, module_id: LocalModuleId) -> Module { pub(crate) fn with_module_id(self, module_id: LocalModuleId) -> Module {
Module::new(self.krate(), module_id) Module::new(self.krate(), module_id)
} }
} }
pub struct Import {
pub(crate) parent: Module,
pub(crate) id: LocalImportId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructField { pub struct StructField {
pub(crate) parent: VariantDef, pub(crate) parent: VariantDef,
@ -307,8 +244,10 @@ impl StructField {
self.parent.variant_data(db).fields()[self.id].name.clone() self.parent.variant_data(db).fields()[self.id].name.clone()
} }
pub fn ty(&self, db: &impl HirDatabase) -> Ty { pub fn ty(&self, db: &impl HirDatabase) -> Type {
db.field_types(self.parent.into())[self.id].clone() let var_id = self.parent.into();
let ty = db.field_types(var_id)[self.id].clone();
Type::new(db, self.parent.module(db).id.krate.into(), var_id, ty)
} }
pub fn parent_def(&self, _db: &impl HirDatabase) -> VariantDef { pub fn parent_def(&self, _db: &impl HirDatabase) -> VariantDef {
@ -323,7 +262,7 @@ pub struct Struct {
impl Struct { impl Struct {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
Module { id: self.id.module(db) } Module { id: self.id.lookup(db).container.module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
@ -343,21 +282,8 @@ impl Struct {
.collect() .collect()
} }
pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
db.struct_data(self.id.into())
.variant_data
.fields()
.iter()
.find(|(_id, data)| data.name == *name)
.map(|(id, _)| StructField { parent: self.into(), id })
}
pub fn ty(self, db: &impl HirDatabase) -> Type { pub fn ty(self, db: &impl HirDatabase) -> Type {
Type::from_def(db, self.id.module(db).krate, self.id) Type::from_def(db, self.id.lookup(db).container.module(db).krate, self.id)
}
pub fn constructor_ty(self, db: &impl HirDatabase) -> Ty {
db.value_ty(self.id.into())
} }
fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> { fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
@ -376,11 +302,11 @@ impl Union {
} }
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
Module { id: self.id.module(db) } Module { id: self.id.lookup(db).container.module(db) }
} }
pub fn ty(self, db: &impl HirDatabase) -> Type { pub fn ty(self, db: &impl HirDatabase) -> Type {
Type::from_def(db, self.id.module(db).krate, self.id) Type::from_def(db, self.id.lookup(db).container.module(db).krate, self.id)
} }
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> { pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
@ -392,15 +318,6 @@ impl Union {
.collect() .collect()
} }
pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
db.union_data(self.id)
.variant_data
.fields()
.iter()
.find(|(_id, data)| data.name == *name)
.map(|(id, _)| StructField { parent: self.into(), id })
}
fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> { fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
db.union_data(self.id).variant_data.clone() db.union_data(self.id).variant_data.clone()
} }
@ -413,7 +330,7 @@ pub struct Enum {
impl Enum { impl Enum {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
Module { id: self.id.module(db) } Module { id: self.id.lookup(db).container.module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
@ -432,13 +349,8 @@ impl Enum {
.collect() .collect()
} }
pub fn variant(self, db: &impl DefDatabase, name: &Name) -> Option<EnumVariant> {
let id = db.enum_data(self.id).variant(name)?;
Some(EnumVariant { parent: self, id })
}
pub fn ty(self, db: &impl HirDatabase) -> Type { pub fn ty(self, db: &impl HirDatabase) -> Type {
Type::from_def(db, self.id.module(db).krate, self.id) Type::from_def(db, self.id.lookup(db).container.module(db).krate, self.id)
} }
} }
@ -468,14 +380,6 @@ impl EnumVariant {
.collect() .collect()
} }
pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
self.variant_data(db)
.fields()
.iter()
.find(|(_id, data)| data.name == *name)
.map(|(id, _)| StructField { parent: self.into(), id })
}
pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> { pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
db.enum_data(self.parent.id).variants[self.id].variant_data.clone() db.enum_data(self.parent.id).variants[self.id].variant_data.clone()
} }
@ -593,48 +497,8 @@ impl Function {
db.function_data(self.id).params.clone() db.function_data(self.id).params.clone()
} }
pub fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
db.body_with_source_map(self.id.into()).1
}
pub fn body(self, db: &impl HirDatabase) -> Arc<Body> {
db.body(self.id.into())
}
pub fn ty(self, db: &impl HirDatabase) -> Ty {
db.value_ty(self.id.into())
}
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.id.into())
}
/// The containing impl block, if this is a method.
pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
match self.container(db) {
Some(Container::ImplBlock(it)) => Some(it),
_ => None,
}
}
/// The containing trait, if this is a trait method definition.
pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {
match self.container(db) {
Some(Container::Trait(it)) => Some(it),
_ => None,
}
}
pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
match self.id.lookup(db).container {
ContainerId::TraitId(it) => Some(Container::Trait(it.into())),
ContainerId::ImplId(it) => Some(Container::ImplBlock(it.into())),
ContainerId::ModuleId(_) => None,
}
}
pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) { pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
let infer = self.infer(db); let infer = db.infer(self.id.into());
infer.add_diagnostics(db, self.id, sink); infer.add_diagnostics(db, self.id, sink);
let mut validator = ExprValidator::new(self.id, infer, sink); let mut validator = ExprValidator::new(self.id, infer, sink);
validator.validate_body(db); validator.validate_body(db);
@ -658,34 +522,6 @@ impl Const {
pub fn name(self, db: &impl HirDatabase) -> Option<Name> { pub fn name(self, db: &impl HirDatabase) -> Option<Name> {
db.const_data(self.id).name.clone() db.const_data(self.id).name.clone()
} }
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.id.into())
}
/// The containing impl block, if this is a type alias.
pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
match self.container(db) {
Some(Container::ImplBlock(it)) => Some(it),
_ => None,
}
}
/// The containing trait, if this is a trait type alias definition.
pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {
match self.container(db) {
Some(Container::Trait(it)) => Some(it),
_ => None,
}
}
pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
match self.id.lookup(db).container {
ContainerId::TraitId(it) => Some(Container::Trait(it.into())),
ContainerId::ImplId(it) => Some(Container::ImplBlock(it.into())),
ContainerId::ModuleId(_) => None,
}
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -701,10 +537,6 @@ impl Static {
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.id.into())
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -714,7 +546,7 @@ pub struct Trait {
impl Trait { impl Trait {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
Module { id: self.id.module(db) } Module { id: self.id.lookup(db).container.module(db) }
} }
pub fn name(self, db: &impl DefDatabase) -> Name { pub fn name(self, db: &impl DefDatabase) -> Name {
@ -749,30 +581,6 @@ impl TypeAlias {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
/// The containing impl block, if this is a type alias.
pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
match self.container(db) {
Some(Container::ImplBlock(it)) => Some(it),
_ => None,
}
}
/// The containing trait, if this is a trait type alias definition.
pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {
match self.container(db) {
Some(Container::Trait(it)) => Some(it),
_ => None,
}
}
pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
match self.id.lookup(db).container {
ContainerId::TraitId(it) => Some(Container::Trait(it.into())),
ContainerId::ImplId(it) => Some(Container::ImplBlock(it.into())),
ContainerId::ModuleId(_) => None,
}
}
pub fn type_ref(self, db: &impl DefDatabase) -> Option<TypeRef> { pub fn type_ref(self, db: &impl DefDatabase) -> Option<TypeRef> {
db.type_alias_data(self.id).type_ref.clone() db.type_alias_data(self.id).type_ref.clone()
} }
@ -791,14 +599,6 @@ pub struct MacroDef {
pub(crate) id: MacroDefId, pub(crate) id: MacroDefId,
} }
impl MacroDef {}
pub enum Container {
Trait(Trait),
ImplBlock(ImplBlock),
}
impl_froms!(Container: Trait, ImplBlock);
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum AssocItem { pub enum AssocItem {
Function(Function), Function(Function),
@ -819,15 +619,6 @@ impl AssocItem {
AssocItem::TypeAlias(t) => t.module(db), AssocItem::TypeAlias(t) => t.module(db),
} }
} }
pub fn container(self, db: &impl DefDatabase) -> Container {
match self {
AssocItem::Function(f) => f.container(db),
AssocItem::Const(c) => c.container(db),
AssocItem::TypeAlias(t) => t.container(db),
}
.expect("AssocItem without container")
}
} }
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
@ -869,7 +660,7 @@ impl Local {
} }
pub fn is_self(self, db: &impl HirDatabase) -> bool { pub fn is_self(self, db: &impl HirDatabase) -> bool {
self.name(db) == Some(name::SELF_PARAM) self.name(db) == Some(name![self])
} }
pub fn is_mut(self, db: &impl HirDatabase) -> bool { pub fn is_mut(self, db: &impl HirDatabase) -> bool {
@ -901,18 +692,30 @@ impl Local {
Type { krate, ty: InEnvironment { value: ty, environment } } Type { krate, ty: InEnvironment { value: ty, environment } }
} }
pub fn source(self, db: &impl HirDatabase) -> Source<Either<ast::BindPat, ast::SelfParam>> { pub fn source(self, db: &impl HirDatabase) -> InFile<Either<ast::BindPat, ast::SelfParam>> {
let (_body, source_map) = db.body_with_source_map(self.parent.into()); let (_body, source_map) = db.body_with_source_map(self.parent.into());
let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm... let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
let root = src.file_syntax(db); let root = src.file_syntax(db);
src.map(|ast| ast.map(|it| it.cast().unwrap().to_node(&root), |it| it.to_node(&root))) src.map(|ast| {
ast.map_left(|it| it.cast().unwrap().to_node(&root)).map_right(|it| it.to_node(&root))
})
} }
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct GenericParam { pub struct TypeParam {
pub(crate) parent: GenericDefId, pub(crate) id: TypeParamId,
pub(crate) idx: u32, }
impl TypeParam {
pub fn name(self, db: &impl HirDatabase) -> Name {
let params = db.generic_params(self.id.parent);
params.types[self.id.local_id].name.clone()
}
pub fn module(self, db: &impl HirDatabase) -> Module {
self.id.parent.module(db).into()
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -922,11 +725,11 @@ pub struct ImplBlock {
impl ImplBlock { impl ImplBlock {
pub fn all_in_crate(db: &impl HirDatabase, krate: Crate) -> Vec<ImplBlock> { pub fn all_in_crate(db: &impl HirDatabase, krate: Crate) -> Vec<ImplBlock> {
let impls = db.impls_in_crate(krate.crate_id); let impls = db.impls_in_crate(krate.id);
impls.all_impls().map(Self::from).collect() impls.all_impls().map(Self::from).collect()
} }
pub fn for_trait(db: &impl HirDatabase, krate: Crate, trait_: Trait) -> Vec<ImplBlock> { pub fn for_trait(db: &impl HirDatabase, krate: Crate, trait_: Trait) -> Vec<ImplBlock> {
let impls = db.impls_in_crate(krate.crate_id); let impls = db.impls_in_crate(krate.id);
impls.lookup_impl_blocks_for_trait(trait_.id).map(Self::from).collect() impls.lookup_impl_blocks_for_trait(trait_.id).map(Self::from).collect()
} }
@ -943,7 +746,10 @@ impl ImplBlock {
let resolver = self.id.resolver(db); let resolver = self.id.resolver(db);
let environment = TraitEnvironment::lower(db, &resolver); let environment = TraitEnvironment::lower(db, &resolver);
let ty = Ty::from_hir(db, &resolver, &impl_data.target_type); let ty = Ty::from_hir(db, &resolver, &impl_data.target_type);
Type { krate: self.id.module(db).krate, ty: InEnvironment { value: ty, environment } } Type {
krate: self.id.lookup(db).container.module(db).krate,
ty: InEnvironment { value: ty, environment },
}
} }
pub fn items(&self, db: &impl DefDatabase) -> Vec<AssocItem> { pub fn items(&self, db: &impl DefDatabase) -> Vec<AssocItem> {
@ -955,11 +761,11 @@ impl ImplBlock {
} }
pub fn module(&self, db: &impl DefDatabase) -> Module { pub fn module(&self, db: &impl DefDatabase) -> Module {
self.id.module(db).into() self.id.lookup(db).container.module(db).into()
} }
pub fn krate(&self, db: &impl DefDatabase) -> Crate { pub fn krate(&self, db: &impl DefDatabase) -> Crate {
Crate { crate_id: self.module(db).id.krate } Crate { id: self.module(db).id.krate }
} }
} }
@ -970,15 +776,19 @@ pub struct Type {
} }
impl Type { impl Type {
fn new(db: &impl HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type {
let resolver = lexical_env.resolver(db);
let environment = TraitEnvironment::lower(db, &resolver);
Type { krate, ty: InEnvironment { value: ty, environment } }
}
fn from_def( fn from_def(
db: &impl HirDatabase, db: &impl HirDatabase,
krate: CrateId, krate: CrateId,
def: impl HasResolver + Into<TyDefId>, def: impl HasResolver + Into<TyDefId>,
) -> Type { ) -> Type {
let resolver = def.resolver(db);
let environment = TraitEnvironment::lower(db, &resolver);
let ty = db.ty(def.into()); let ty = db.ty(def.into());
Type { krate, ty: InEnvironment { value: ty, environment } } Type::new(db, krate, def, ty)
} }
pub fn is_bool(&self) -> bool { pub fn is_bool(&self) -> bool {
@ -1028,7 +838,7 @@ impl Type {
pub fn fields(&self, db: &impl HirDatabase) -> Vec<(StructField, Type)> { pub fn fields(&self, db: &impl HirDatabase) -> Vec<(StructField, Type)> {
if let Ty::Apply(a_ty) = &self.ty.value { if let Ty::Apply(a_ty) = &self.ty.value {
match a_ty.ctor { match a_ty.ctor {
ty::TypeCtor::Adt(AdtId::StructId(s)) => { TypeCtor::Adt(AdtId::StructId(s)) => {
let var_def = s.into(); let var_def = s.into();
return db return db
.field_types(var_def) .field_types(var_def)
@ -1050,7 +860,7 @@ impl Type {
let mut res = Vec::new(); let mut res = Vec::new();
if let Ty::Apply(a_ty) = &self.ty.value { if let Ty::Apply(a_ty) = &self.ty.value {
match a_ty.ctor { match a_ty.ctor {
ty::TypeCtor::Tuple { .. } => { TypeCtor::Tuple { .. } => {
for ty in a_ty.parameters.iter() { for ty in a_ty.parameters.iter() {
let ty = ty.clone().subst(&a_ty.parameters); let ty = ty.clone().subst(&a_ty.parameters);
res.push(self.derived(ty)); res.push(self.derived(ty));
@ -1069,11 +879,16 @@ impl Type {
) -> Vec<(StructField, Type)> { ) -> Vec<(StructField, Type)> {
// FIXME: check that ty and def match // FIXME: check that ty and def match
match &self.ty.value { match &self.ty.value {
Ty::Apply(a_ty) => def Ty::Apply(a_ty) => {
.fields(db) let field_types = db.field_types(def.into());
.into_iter() def.fields(db)
.map(|it| (it, self.derived(it.ty(db).subst(&a_ty.parameters)))) .into_iter()
.collect(), .map(|it| {
let ty = field_types[it.id].clone().subst(&a_ty.parameters);
(it, self.derived(ty))
})
.collect()
}
_ => Vec::new(), _ => Vec::new(),
} }
} }
@ -1081,10 +896,10 @@ impl Type {
pub fn autoderef<'a>(&'a self, db: &'a impl HirDatabase) -> impl Iterator<Item = Type> + 'a { pub fn autoderef<'a>(&'a self, db: &'a impl HirDatabase) -> impl Iterator<Item = Type> + 'a {
// There should be no inference vars in types passed here // There should be no inference vars in types passed here
// FIXME check that? // FIXME check that?
let canonical = crate::ty::Canonical { value: self.ty.value.clone(), num_vars: 0 }; let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 };
let environment = self.ty.environment.clone(); let environment = self.ty.environment.clone();
let ty = InEnvironment { value: canonical, environment: environment.clone() }; let ty = InEnvironment { value: canonical, environment: environment.clone() };
ty::autoderef(db, Some(self.krate), ty) autoderef(db, Some(self.krate), ty)
.map(|canonical| canonical.value) .map(|canonical| canonical.value)
.map(move |ty| self.derived(ty)) .map(move |ty| self.derived(ty))
} }
@ -1097,7 +912,7 @@ impl Type {
krate: Crate, krate: Crate,
mut callback: impl FnMut(AssocItem) -> Option<T>, mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> { ) -> Option<T> {
for krate in self.ty.value.def_crates(db, krate.crate_id)? { for krate in self.ty.value.def_crates(db, krate.id)? {
let impls = db.impls_in_crate(krate); let impls = db.impls_in_crate(krate);
for impl_block in impls.lookup_impl_blocks(&self.ty.value) { for impl_block in impls.lookup_impl_blocks(&self.ty.value) {
@ -1111,11 +926,6 @@ impl Type {
None None
} }
// FIXME: remove
pub fn into_ty(self) -> Ty {
self.ty.value
}
pub fn as_adt(&self) -> Option<Adt> { pub fn as_adt(&self) -> Option<Adt> {
let (adt, _subst) = self.ty.value.as_adt()?; let (adt, _subst) = self.ty.value.as_adt()?;
Some(adt.into()) Some(adt.into())
@ -1124,15 +934,14 @@ impl Type {
// FIXME: provide required accessors such that it becomes implementable from outside. // FIXME: provide required accessors such that it becomes implementable from outside.
pub fn is_equal_for_find_impls(&self, other: &Type) -> bool { pub fn is_equal_for_find_impls(&self, other: &Type) -> bool {
match (&self.ty.value, &other.ty.value) { match (&self.ty.value, &other.ty.value) {
(Ty::Apply(a_original_ty), Ty::Apply(ty::ApplicationTy { ctor, parameters })) => { (Ty::Apply(a_original_ty), Ty::Apply(ApplicationTy { ctor, parameters })) => match ctor
match ctor { {
TypeCtor::Ref(..) => match parameters.as_single() { TypeCtor::Ref(..) => match parameters.as_single() {
Ty::Apply(a_ty) => a_original_ty.ctor == a_ty.ctor, Ty::Apply(a_ty) => a_original_ty.ctor == a_ty.ctor,
_ => false, _ => false,
}, },
_ => a_original_ty.ctor == *ctor, _ => a_original_ty.ctor == *ctor,
} },
}
_ => false, _ => false,
} }
} }
@ -1155,7 +964,7 @@ impl HirDisplay for Type {
pub enum ScopeDef { pub enum ScopeDef {
ModuleDef(ModuleDef), ModuleDef(ModuleDef),
MacroDef(MacroDef), MacroDef(MacroDef),
GenericParam(GenericParam), GenericParam(TypeParam),
ImplSelfType(ImplBlock), ImplSelfType(ImplBlock),
AdtSelfType(Adt), AdtSelfType(Adt),
Local(Local), Local(Local),

View file

@ -1,128 +0,0 @@
//! FIXME: write short doc here
use hir_def::{AstItemDef, HasChildSource, HasSource as _, Lookup, VariantId};
use hir_expand::either::Either;
use ra_syntax::ast;
use crate::{
db::DefDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplBlock, Import, MacroDef,
Module, ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union,
};
pub use hir_expand::Source;
pub trait HasSource {
type Ast;
fn source(self, db: &impl DefDatabase) -> Source<Self::Ast>;
}
/// NB: Module is !HasSource, because it has two source nodes at the same time:
/// definition and declaration.
impl Module {
/// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
pub fn definition_source(self, db: &impl DefDatabase) -> Source<ModuleSource> {
let def_map = db.crate_def_map(self.id.krate);
let src = def_map[self.id.local_id].definition_source(db);
src.map(|it| match it {
Either::A(it) => ModuleSource::SourceFile(it),
Either::B(it) => ModuleSource::Module(it),
})
}
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
/// `None` for the crate root.
pub fn declaration_source(self, db: &impl DefDatabase) -> Option<Source<ast::Module>> {
let def_map = db.crate_def_map(self.id.krate);
def_map[self.id.local_id].declaration_source(db)
}
}
impl HasSource for StructField {
type Ast = FieldSource;
fn source(self, db: &impl DefDatabase) -> Source<FieldSource> {
let var = VariantId::from(self.parent);
let src = var.child_source(db);
src.map(|it| match it[self.id].clone() {
Either::A(it) => FieldSource::Pos(it),
Either::B(it) => FieldSource::Named(it),
})
}
}
impl HasSource for Struct {
type Ast = ast::StructDef;
fn source(self, db: &impl DefDatabase) -> Source<ast::StructDef> {
self.id.source(db)
}
}
impl HasSource for Union {
type Ast = ast::UnionDef;
fn source(self, db: &impl DefDatabase) -> Source<ast::UnionDef> {
self.id.source(db)
}
}
impl HasSource for Enum {
type Ast = ast::EnumDef;
fn source(self, db: &impl DefDatabase) -> Source<ast::EnumDef> {
self.id.source(db)
}
}
impl HasSource for EnumVariant {
type Ast = ast::EnumVariant;
fn source(self, db: &impl DefDatabase) -> Source<ast::EnumVariant> {
self.parent.id.child_source(db).map(|map| map[self.id].clone())
}
}
impl HasSource for Function {
type Ast = ast::FnDef;
fn source(self, db: &impl DefDatabase) -> Source<ast::FnDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for Const {
type Ast = ast::ConstDef;
fn source(self, db: &impl DefDatabase) -> Source<ast::ConstDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for Static {
type Ast = ast::StaticDef;
fn source(self, db: &impl DefDatabase) -> Source<ast::StaticDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for Trait {
type Ast = ast::TraitDef;
fn source(self, db: &impl DefDatabase) -> Source<ast::TraitDef> {
self.id.source(db)
}
}
impl HasSource for TypeAlias {
type Ast = ast::TypeAliasDef;
fn source(self, db: &impl DefDatabase) -> Source<ast::TypeAliasDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for MacroDef {
type Ast = ast::MacroCall;
fn source(self, db: &impl DefDatabase) -> Source<ast::MacroCall> {
Source { file_id: self.id.ast_id.file_id(), value: self.id.ast_id.to_node(db) }
}
}
impl HasSource for ImplBlock {
type Ast = ast::ImplBlock;
fn source(self, db: &impl DefDatabase) -> Source<ast::ImplBlock> {
self.id.source(db)
}
}
impl HasSource for Import {
type Ast = Either<ast::UseTree, ast::ExternCrateItem>;
/// Returns the syntax of the last path segment corresponding to this import
fn source(self, db: &impl DefDatabase) -> Source<Self::Ast> {
let src = self.parent.definition_source(db);
let (_, source_map) = db.raw_items_with_source_map(src.file_id);
let root = db.parse_or_expand(src.file_id).unwrap();
let ptr = source_map.get(self.id);
src.with_value(ptr.map(|it| it.to_node(&root), |it| it.to_node(&root)))
}
}

View file

@ -4,8 +4,8 @@ pub use hir_def::db::{
BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQuery, CrateLangItemsQuery, BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQuery, CrateLangItemsQuery,
DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, ExprScopesQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, ExprScopesQuery,
FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase, InternDatabaseStorage, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase, InternDatabaseStorage,
LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, RawItemsWithSourceMapQuery, LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, StaticDataQuery, StructDataQuery,
StaticDataQuery, StructDataQuery, TraitDataQuery, TypeAliasDataQuery, TraitDataQuery, TypeAliasDataQuery,
}; };
pub use hir_expand::db::{ pub use hir_expand::db::{
AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery, AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,

View file

@ -1,94 +0,0 @@
//! XXX: This does not work at the moment.
//!
//! printf debugging infrastructure for rust-analyzer.
//!
//! When you print a hir type, like a module, using `eprintln!("{:?}", module)`,
//! you usually get back a numeric ID, which doesn't tell you much:
//! `Module(92)`.
//!
//! This module adds convenience `debug` methods to various types, which resolve
//! the id to a human-readable location info:
//!
//! ```not_rust
//! eprintln!("{:?}", module.debug(db));
//! =>
//! Module { name: collections, path: "liballoc/collections/mod.rs" }
//! ```
//!
//! Note that to get this info, we might need to execute queries! So
//!
//! * don't use the `debug` methods for logging
//! * when debugging, be aware that interference is possible.
use std::fmt;
use hir_expand::HirFileId;
use ra_db::{CrateId, FileId};
use crate::{db::HirDatabase, Crate, Module, Name};
impl Crate {
pub fn debug(self, db: &impl HirDebugDatabase) -> impl fmt::Debug + '_ {
debug_fn(move |fmt| db.debug_crate(self, fmt))
}
}
impl Module {
pub fn debug(self, db: &impl HirDebugDatabase) -> impl fmt::Debug + '_ {
debug_fn(move |fmt| db.debug_module(self, fmt))
}
}
pub trait HirDebugHelper: HirDatabase {
fn crate_name(&self, _krate: CrateId) -> Option<String> {
None
}
fn file_path(&self, _file_id: FileId) -> Option<String> {
None
}
}
pub trait HirDebugDatabase {
fn debug_crate(&self, krate: Crate, fmt: &mut fmt::Formatter<'_>) -> fmt::Result;
fn debug_module(&self, module: Module, fmt: &mut fmt::Formatter<'_>) -> fmt::Result;
fn debug_hir_file_id(&self, file_id: HirFileId, fmt: &mut fmt::Formatter<'_>) -> fmt::Result;
}
impl<DB: HirDebugHelper> HirDebugDatabase for DB {
fn debug_crate(&self, krate: Crate, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut builder = fmt.debug_tuple("Crate");
match self.crate_name(krate.crate_id) {
Some(name) => builder.field(&name),
None => builder.field(&krate.crate_id),
}
.finish()
}
fn debug_module(&self, module: Module, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let file_id = module.definition_source(self).file_id.original_file(self);
let path = self.file_path(file_id).unwrap_or_else(|| "N/A".to_string());
fmt.debug_struct("Module")
.field("name", &module.name(self).unwrap_or_else(Name::missing))
.field("path", &path)
.finish()
}
fn debug_hir_file_id(&self, file_id: HirFileId, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let original = file_id.original_file(self);
let path = self.file_path(original).unwrap_or_else(|| "N/A".to_string());
let is_macro = file_id != original.into();
fmt.debug_struct("HirFileId").field("path", &path).field("macro", &is_macro).finish()
}
}
fn debug_fn(f: impl Fn(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl fmt::Debug {
struct DebugFn<F>(F);
impl<F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result> fmt::Debug for DebugFn<F> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
(&self.0)(fmt)
}
}
DebugFn(f)
}

View file

@ -9,16 +9,10 @@ use hir_def::{
}; };
use crate::{ use crate::{
Adt, AssocItem, AttrDef, Crate, DefWithBody, EnumVariant, GenericDef, ModuleDef, StructField, Adt, AssocItem, AttrDef, DefWithBody, EnumVariant, GenericDef, ModuleDef, StructField,
VariantDef, VariantDef,
}; };
impl From<ra_db::CrateId> for Crate {
fn from(crate_id: ra_db::CrateId) -> Self {
Crate { crate_id }
}
}
macro_rules! from_id { macro_rules! from_id {
($(($id:path, $ty:path)),*) => {$( ($(($id:path, $ty:path)),*) => {$(
impl From<$id> for $ty { impl From<$id> for $ty {
@ -26,10 +20,16 @@ macro_rules! from_id {
$ty { id } $ty { id }
} }
} }
impl From<$ty> for $id {
fn from(ty: $ty) -> $id {
ty.id
}
}
)*} )*}
} }
from_id![ from_id![
(ra_db::CrateId, crate::Crate),
(hir_def::ModuleId, crate::Module), (hir_def::ModuleId, crate::Module),
(hir_def::StructId, crate::Struct), (hir_def::StructId, crate::Struct),
(hir_def::UnionId, crate::Union), (hir_def::UnionId, crate::Union),

View file

@ -1,219 +1,140 @@
//! FIXME: write short doc here //! Finds a corresponding hir data structure for a syntax node in a specific
//! file.
use hir_def::{AstItemDef, LocationCtx, ModuleId}; use hir_def::{
child_by_source::ChildBySource, dyn_map::DynMap, keys, keys::Key, nameres::ModuleSource,
ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GenericDefId, ImplId, ModuleId,
StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
};
use hir_expand::{name::AsName, AstId, MacroDefId, MacroDefKind}; use hir_expand::{name::AsName, AstId, MacroDefId, MacroDefKind};
use ra_db::FileId;
use ra_prof::profile;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, NameOwner}, ast::{self, AstNode, NameOwner},
match_ast, AstPtr, SyntaxNode, match_ast, SyntaxNode,
}; };
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{DefDatabase, HirDatabase},
AssocItem, Const, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasSource, ImplBlock, Const, DefWithBody, Enum, EnumVariant, FieldSource, Function, ImplBlock, InFile, Local,
Local, MacroDef, Module, ModuleDef, ModuleSource, Source, Static, Struct, StructField, Trait, MacroDef, Module, Static, Struct, StructField, Trait, TypeAlias, TypeParam, Union,
TypeAlias, Union, VariantDef,
}; };
pub trait FromSource: Sized { pub trait FromSource: Sized {
type Ast; type Ast;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self>; fn from_source(db: &impl DefDatabase, src: InFile<Self::Ast>) -> Option<Self>;
} }
impl FromSource for Struct { pub trait FromSourceByContainer: Sized {
type Ast = ast::StructDef; type Ast: AstNode + 'static;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { type Id: Copy + 'static;
let id = from_source(db, src)?; const KEY: Key<Self::Ast, Self::Id>;
Some(Struct { id })
}
} }
impl FromSource for Union {
type Ast = ast::UnionDef; impl<T: FromSourceByContainer> FromSource for T
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { where
let id = from_source(db, src)?; T: From<<T as FromSourceByContainer>::Id>,
Some(Union { id }) {
} type Ast = <T as FromSourceByContainer>::Ast;
} fn from_source(db: &impl DefDatabase, src: InFile<Self::Ast>) -> Option<Self> {
impl FromSource for Enum { analyze_container(db, src.as_ref().map(|it| it.syntax()))[T::KEY]
type Ast = ast::EnumDef; .get(&src)
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { .copied()
let id = from_source(db, src)?; .map(Self::from)
Some(Enum { id })
}
}
impl FromSource for Trait {
type Ast = ast::TraitDef;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let id = from_source(db, src)?;
Some(Trait { id })
}
}
impl FromSource for Function {
type Ast = ast::FnDef;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let items = match Container::find(db, src.as_ref().map(|it| it.syntax()))? {
Container::Trait(it) => it.items(db),
Container::ImplBlock(it) => it.items(db),
Container::Module(m) => {
return m
.declarations(db)
.into_iter()
.filter_map(|it| match it {
ModuleDef::Function(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
}
};
items
.into_iter()
.filter_map(|it| match it {
AssocItem::Function(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
} }
} }
impl FromSource for Const { macro_rules! from_source_by_container_impls {
type Ast = ast::ConstDef; ($(($hir:ident, $id:ident, $ast:path, $key:path)),* ,) => {$(
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { impl FromSourceByContainer for $hir {
let items = match Container::find(db, src.as_ref().map(|it| it.syntax()))? { type Ast = $ast;
Container::Trait(it) => it.items(db), type Id = $id;
Container::ImplBlock(it) => it.items(db), const KEY: Key<Self::Ast, Self::Id> = $key;
Container::Module(m) => { }
return m )*}
.declarations(db)
.into_iter()
.filter_map(|it| match it {
ModuleDef::Const(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
}
};
items
.into_iter()
.filter_map(|it| match it {
AssocItem::Const(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
}
}
impl FromSource for Static {
type Ast = ast::StaticDef;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let module = match Container::find(db, src.as_ref().map(|it| it.syntax()))? {
Container::Module(it) => it,
Container::Trait(_) | Container::ImplBlock(_) => return None,
};
module
.declarations(db)
.into_iter()
.filter_map(|it| match it {
ModuleDef::Static(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
}
} }
impl FromSource for TypeAlias { from_source_by_container_impls![
type Ast = ast::TypeAliasDef; (Struct, StructId, ast::StructDef, keys::STRUCT),
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { (Union, UnionId, ast::UnionDef, keys::UNION),
let items = match Container::find(db, src.as_ref().map(|it| it.syntax()))? { (Enum, EnumId, ast::EnumDef, keys::ENUM),
Container::Trait(it) => it.items(db), (Trait, TraitId, ast::TraitDef, keys::TRAIT),
Container::ImplBlock(it) => it.items(db), (Function, FunctionId, ast::FnDef, keys::FUNCTION),
Container::Module(m) => { (Static, StaticId, ast::StaticDef, keys::STATIC),
return m (Const, ConstId, ast::ConstDef, keys::CONST),
.declarations(db) (TypeAlias, TypeAliasId, ast::TypeAliasDef, keys::TYPE_ALIAS),
.into_iter() (ImplBlock, ImplId, ast::ImplBlock, keys::IMPL),
.filter_map(|it| match it { ];
ModuleDef::TypeAlias(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
}
};
items
.into_iter()
.filter_map(|it| match it {
AssocItem::TypeAlias(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
}
}
impl FromSource for MacroDef { impl FromSource for MacroDef {
type Ast = ast::MacroCall; type Ast = ast::MacroCall;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { fn from_source(db: &impl DefDatabase, src: InFile<Self::Ast>) -> Option<Self> {
let kind = MacroDefKind::Declarative; let kind = MacroDefKind::Declarative;
let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax())); let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
let module = Module::from_definition(db, Source::new(src.file_id, module_src))?; let module = Module::from_definition(db, InFile::new(src.file_id, module_src))?;
let krate = module.krate().crate_id(); let krate = Some(module.krate().id);
let ast_id = AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value)); let ast_id = Some(AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value)));
let id: MacroDefId = MacroDefId { krate, ast_id, kind }; let id: MacroDefId = MacroDefId { krate, ast_id, kind };
Some(MacroDef { id }) Some(MacroDef { id })
} }
} }
impl FromSource for ImplBlock {
type Ast = ast::ImplBlock;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let id = from_source(db, src)?;
Some(ImplBlock { id })
}
}
impl FromSource for EnumVariant { impl FromSource for EnumVariant {
type Ast = ast::EnumVariant; type Ast = ast::EnumVariant;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { fn from_source(db: &impl DefDatabase, src: InFile<Self::Ast>) -> Option<Self> {
let parent_enum = src.value.parent_enum(); let parent_enum = src.value.parent_enum();
let src_enum = Source { file_id: src.file_id, value: parent_enum }; let src_enum = InFile { file_id: src.file_id, value: parent_enum };
let variants = Enum::from_source(db, src_enum)?.variants(db); let parent_enum = Enum::from_source(db, src_enum)?;
variants.into_iter().find(|v| same_source(&v.source(db), &src)) parent_enum.id.child_by_source(db)[keys::ENUM_VARIANT]
.get(&src)
.copied()
.map(EnumVariant::from)
} }
} }
impl FromSource for StructField { impl FromSource for StructField {
type Ast = FieldSource; type Ast = FieldSource;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { fn from_source(db: &impl DefDatabase, src: InFile<Self::Ast>) -> Option<Self> {
let variant_def: VariantDef = match src.value { let src = src.as_ref();
FieldSource::Named(ref field) => {
// FIXME this is buggy
let variant_id: VariantId = match src.value {
FieldSource::Named(field) => {
let value = field.syntax().ancestors().find_map(ast::StructDef::cast)?; let value = field.syntax().ancestors().find_map(ast::StructDef::cast)?;
let src = Source { file_id: src.file_id, value }; let src = InFile { file_id: src.file_id, value };
let def = Struct::from_source(db, src)?; let def = Struct::from_source(db, src)?;
VariantDef::from(def) def.id.into()
} }
FieldSource::Pos(ref field) => { FieldSource::Pos(field) => {
let value = field.syntax().ancestors().find_map(ast::EnumVariant::cast)?; let value = field.syntax().ancestors().find_map(ast::EnumVariant::cast)?;
let src = Source { file_id: src.file_id, value }; let src = InFile { file_id: src.file_id, value };
let def = EnumVariant::from_source(db, src)?; let def = EnumVariant::from_source(db, src)?;
VariantDef::from(def) EnumVariantId::from(def).into()
} }
}; };
variant_def
.variant_data(db) let dyn_map = variant_id.child_by_source(db);
.fields() match src.value {
.iter() FieldSource::Pos(it) => dyn_map[keys::TUPLE_FIELD].get(&src.with_value(it.clone())),
.map(|(id, _)| StructField { parent: variant_def, id }) FieldSource::Named(it) => dyn_map[keys::RECORD_FIELD].get(&src.with_value(it.clone())),
.find(|f| f.source(db) == src) }
.copied()
.map(StructField::from)
} }
} }
impl Local { impl Local {
pub fn from_source(db: &impl HirDatabase, src: Source<ast::BindPat>) -> Option<Self> { pub fn from_source(db: &impl HirDatabase, src: InFile<ast::BindPat>) -> Option<Self> {
let file_id = src.file_id; let file_id = src.file_id;
let parent: DefWithBody = src.value.syntax().ancestors().find_map(|it| { let parent: DefWithBody = src.value.syntax().ancestors().find_map(|it| {
let res = match_ast! { let res = match_ast! {
match it { match it {
ast::ConstDef(value) => { Const::from_source(db, Source { value, file_id})?.into() }, ast::ConstDef(value) => { Const::from_source(db, InFile { value, file_id})?.into() },
ast::StaticDef(value) => { Static::from_source(db, Source { value, file_id})?.into() }, ast::StaticDef(value) => { Static::from_source(db, InFile { value, file_id})?.into() },
ast::FnDef(value) => { Function::from_source(db, Source { value, file_id})?.into() }, ast::FnDef(value) => { Function::from_source(db, InFile { value, file_id})?.into() },
_ => return None, _ => return None,
} }
}; };
@ -226,84 +147,111 @@ impl Local {
} }
} }
impl TypeParam {
pub fn from_source(db: &impl HirDatabase, src: InFile<ast::TypeParam>) -> Option<Self> {
let file_id = src.file_id;
let parent: GenericDefId = src.value.syntax().ancestors().find_map(|it| {
let res = match_ast! {
match it {
ast::FnDef(value) => { Function::from_source(db, InFile { value, file_id})?.id.into() },
ast::StructDef(value) => { Struct::from_source(db, InFile { value, file_id})?.id.into() },
ast::EnumDef(value) => { Enum::from_source(db, InFile { value, file_id})?.id.into() },
ast::TraitDef(value) => { Trait::from_source(db, InFile { value, file_id})?.id.into() },
ast::TypeAliasDef(value) => { TypeAlias::from_source(db, InFile { value, file_id})?.id.into() },
ast::ImplBlock(value) => { ImplBlock::from_source(db, InFile { value, file_id})?.id.into() },
_ => return None,
}
};
Some(res)
})?;
let &id = parent.child_by_source(db)[keys::TYPE_PARAM].get(&src)?;
Some(TypeParam { id })
}
}
impl Module { impl Module {
pub fn from_declaration(db: &impl DefDatabase, src: Source<ast::Module>) -> Option<Self> { pub fn from_declaration(db: &impl DefDatabase, src: InFile<ast::Module>) -> Option<Self> {
let _p = profile("Module::from_declaration");
let parent_declaration = src.value.syntax().ancestors().skip(1).find_map(ast::Module::cast); let parent_declaration = src.value.syntax().ancestors().skip(1).find_map(ast::Module::cast);
let parent_module = match parent_declaration { let parent_module = match parent_declaration {
Some(parent_declaration) => { Some(parent_declaration) => {
let src_parent = Source { file_id: src.file_id, value: parent_declaration }; let src_parent = InFile { file_id: src.file_id, value: parent_declaration };
Module::from_declaration(db, src_parent) Module::from_declaration(db, src_parent)
} }
_ => { None => {
let src_parent = Source { let source_file = db.parse(src.file_id.original_file(db)).tree();
file_id: src.file_id, let src_parent =
value: ModuleSource::new(db, Some(src.file_id.original_file(db)), None), InFile { file_id: src.file_id, value: ModuleSource::SourceFile(source_file) };
};
Module::from_definition(db, src_parent) Module::from_definition(db, src_parent)
} }
}?; }?;
let child_name = src.value.name()?; let child_name = src.value.name()?.as_name();
parent_module.child(db, &child_name.as_name()) let def_map = db.crate_def_map(parent_module.id.krate);
let child_id = def_map[parent_module.id.local_id].children.get(&child_name)?;
Some(parent_module.with_module_id(*child_id))
} }
pub fn from_definition(db: &impl DefDatabase, src: Source<ModuleSource>) -> Option<Self> { pub fn from_definition(db: &impl DefDatabase, src: InFile<ModuleSource>) -> Option<Self> {
let _p = profile("Module::from_definition");
match src.value { match src.value {
ModuleSource::Module(ref module) => { ModuleSource::Module(ref module) => {
assert!(!module.has_semi()); assert!(!module.has_semi());
return Module::from_declaration( return Module::from_declaration(
db, db,
Source { file_id: src.file_id, value: module.clone() }, InFile { file_id: src.file_id, value: module.clone() },
); );
} }
ModuleSource::SourceFile(_) => (), ModuleSource::SourceFile(_) => (),
}; };
let original_file = src.file_id.original_file(db); let original_file = src.file_id.original_file(db);
Module::from_file(db, original_file)
}
let (krate, local_id) = db.relevant_crates(original_file).iter().find_map(|&crate_id| { fn from_file(db: &impl DefDatabase, file: FileId) -> Option<Self> {
let _p = profile("Module::from_file");
let (krate, local_id) = db.relevant_crates(file).iter().find_map(|&crate_id| {
let crate_def_map = db.crate_def_map(crate_id); let crate_def_map = db.crate_def_map(crate_id);
let local_id = crate_def_map.modules_for_file(original_file).next()?; let local_id = crate_def_map.modules_for_file(file).next()?;
Some((crate_id, local_id)) Some((crate_id, local_id))
})?; })?;
Some(Module { id: ModuleId { krate, local_id } }) Some(Module { id: ModuleId { krate, local_id } })
} }
} }
fn from_source<N, DEF>(db: &(impl DefDatabase + AstDatabase), src: Source<N>) -> Option<DEF> fn analyze_container(db: &impl DefDatabase, src: InFile<&SyntaxNode>) -> DynMap {
where let _p = profile("analyze_container");
N: AstNode, return child_by_source(db, src).unwrap_or_default();
DEF: AstItemDef<N>,
{
let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
let module = Module::from_definition(db, Source::new(src.file_id, module_src))?;
let ctx = LocationCtx::new(db, module.id, src.file_id);
let items = db.ast_id_map(src.file_id);
let item_id = items.ast_id(&src.value);
Some(DEF::from_ast_id(ctx, item_id))
}
enum Container { fn child_by_source(db: &impl DefDatabase, src: InFile<&SyntaxNode>) -> Option<DynMap> {
Trait(Trait), for container in src.value.ancestors().skip(1) {
ImplBlock(ImplBlock),
Module(Module),
}
impl Container {
fn find(db: &impl DefDatabase, src: Source<&SyntaxNode>) -> Option<Container> {
// FIXME: this doesn't try to handle nested declarations
for container in src.value.ancestors() {
let res = match_ast! { let res = match_ast! {
match container { match container {
ast::TraitDef(it) => { ast::TraitDef(it) => {
let c = Trait::from_source(db, src.with_value(it))?; let def = Trait::from_source(db, src.with_value(it))?;
Container::Trait(c) def.id.child_by_source(db)
}, },
ast::ImplBlock(it) => { ast::ImplBlock(it) => {
let c = ImplBlock::from_source(db, src.with_value(it))?; let def = ImplBlock::from_source(db, src.with_value(it))?;
Container::ImplBlock(c) def.id.child_by_source(db)
}, },
ast::FnDef(it) => {
let def = Function::from_source(db, src.with_value(it))?;
DefWithBodyId::from(def.id)
.child_by_source(db)
},
ast::StaticDef(it) => {
let def = Static::from_source(db, src.with_value(it))?;
DefWithBodyId::from(def.id)
.child_by_source(db)
},
ast::ConstDef(it) => {
let def = Const::from_source(db, src.with_value(it))?;
DefWithBodyId::from(def.id)
.child_by_source(db)
},
_ => { continue }, _ => { continue },
} }
}; };
@ -312,16 +260,6 @@ impl Container {
let module_source = ModuleSource::from_child_node(db, src); let module_source = ModuleSource::from_child_node(db, src);
let c = Module::from_definition(db, src.with_value(module_source))?; let c = Module::from_definition(db, src.with_value(module_source))?;
Some(Container::Module(c)) Some(c.id.child_by_source(db))
} }
} }
/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
/// equal if they point to exactly the same object.
///
/// In general, we do not guarantee that we have exactly one instance of a
/// syntax tree for each file. We probably should add such guarantee, but, for
/// the time being, we will use identity-less AstPtr comparison.
fn same_source<N: AstNode>(s1: &Source<N>, s2: &Source<N>) -> bool {
s1.as_ref().map(AstPtr::new) == s2.as_ref().map(AstPtr::new)
}

View file

@ -0,0 +1,127 @@
//! FIXME: write short doc here
use either::Either;
use hir_def::{
nameres::ModuleSource,
src::{HasChildSource, HasSource as _},
Lookup, VariantId,
};
use ra_syntax::ast;
use crate::{
db::DefDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplBlock, MacroDef, Module,
Static, Struct, StructField, Trait, TypeAlias, TypeParam, Union,
};
pub use hir_expand::InFile;
pub trait HasSource {
type Ast;
fn source(self, db: &impl DefDatabase) -> InFile<Self::Ast>;
}
/// NB: Module is !HasSource, because it has two source nodes at the same time:
/// definition and declaration.
impl Module {
/// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
pub fn definition_source(self, db: &impl DefDatabase) -> InFile<ModuleSource> {
let def_map = db.crate_def_map(self.id.krate);
def_map[self.id.local_id].definition_source(db)
}
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
/// `None` for the crate root.
pub fn declaration_source(self, db: &impl DefDatabase) -> Option<InFile<ast::Module>> {
let def_map = db.crate_def_map(self.id.krate);
def_map[self.id.local_id].declaration_source(db)
}
}
impl HasSource for StructField {
type Ast = FieldSource;
fn source(self, db: &impl DefDatabase) -> InFile<FieldSource> {
let var = VariantId::from(self.parent);
let src = var.child_source(db);
src.map(|it| match it[self.id].clone() {
Either::Left(it) => FieldSource::Pos(it),
Either::Right(it) => FieldSource::Named(it),
})
}
}
impl HasSource for Struct {
type Ast = ast::StructDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::StructDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for Union {
type Ast = ast::UnionDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::UnionDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for Enum {
type Ast = ast::EnumDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::EnumDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for EnumVariant {
type Ast = ast::EnumVariant;
fn source(self, db: &impl DefDatabase) -> InFile<ast::EnumVariant> {
self.parent.id.child_source(db).map(|map| map[self.id].clone())
}
}
impl HasSource for Function {
type Ast = ast::FnDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::FnDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for Const {
type Ast = ast::ConstDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::ConstDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for Static {
type Ast = ast::StaticDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::StaticDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for Trait {
type Ast = ast::TraitDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::TraitDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for TypeAlias {
type Ast = ast::TypeAliasDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::TypeAliasDef> {
self.id.lookup(db).source(db)
}
}
impl HasSource for MacroDef {
type Ast = ast::MacroCall;
fn source(self, db: &impl DefDatabase) -> InFile<ast::MacroCall> {
InFile {
file_id: self.id.ast_id.expect("MacroDef without ast_id").file_id,
value: self.id.ast_id.expect("MacroDef without ast_id").to_node(db),
}
}
}
impl HasSource for ImplBlock {
type Ast = ast::ImplBlock;
fn source(self, db: &impl DefDatabase) -> InFile<ast::ImplBlock> {
self.id.lookup(db).source(db)
}
}
impl HasSource for TypeParam {
type Ast = Either<ast::TraitDef, ast::TypeParam>;
fn source(self, db: &impl DefDatabase) -> InFile<Self::Ast> {
let child_source = self.id.parent.child_source(db);
child_source.map(|it| it[self.id.local_id].clone())
}
}

View file

@ -26,42 +26,38 @@ macro_rules! impl_froms {
} }
} }
pub mod debug;
pub mod db; pub mod db;
pub mod source_binder; pub mod source_binder;
mod ty;
pub mod diagnostics; pub mod diagnostics;
mod from_id; mod from_id;
mod code_model; mod code_model;
pub mod from_source; mod has_source;
mod from_source;
pub use crate::{ pub use crate::{
code_model::{ code_model::{
src::HasSource, Adt, AssocItem, AttrDef, Const, Container, Crate, CrateDependency, Adt, AssocItem, AttrDef, Const, Crate, CrateDependency, DefWithBody, Docs, Enum,
DefWithBody, Docs, Enum, EnumVariant, FieldSource, Function, GenericDef, GenericParam, EnumVariant, FieldSource, Function, GenericDef, HasAttrs, ImplBlock, Local, MacroDef,
HasAttrs, ImplBlock, Import, Local, MacroDef, Module, ModuleDef, ModuleSource, ScopeDef, Module, ModuleDef, ScopeDef, Static, Struct, StructField, Trait, Type, TypeAlias,
Static, Struct, StructField, Trait, Type, TypeAlias, Union, VariantDef, TypeParam, Union, VariantDef,
}, },
from_source::FromSource, from_source::FromSource,
has_source::HasSource,
source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer}, source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
ty::{
display::HirDisplay,
primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness, Uncertain},
ApplicationTy, CallableDef, Substs, TraitRef, Ty, TypeCtor, TypeWalk,
},
}; };
pub use hir_def::{ pub use hir_def::{
body::scope::ExprScopes, body::scope::ExprScopes,
builtin_type::BuiltinType, builtin_type::BuiltinType,
docs::Documentation, docs::Documentation,
path::{Path, PathKind}, nameres::ModuleSource,
path::{ModPath, Path, PathKind},
type_ref::Mutability, type_ref::Mutability,
}; };
pub use hir_expand::{ pub use hir_expand::{
either::Either, name::Name, HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, Source, name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, Origin,
}; };
pub use hir_ty::{display::HirDisplay, CallableDef};

View file

@ -7,19 +7,26 @@
//! purely for "IDE needs". //! purely for "IDE needs".
use std::sync::Arc; use std::sync::Arc;
use either::Either;
use hir_def::{ use hir_def::{
body::{ body::{
scope::{ExprScopes, ScopeId}, scope::{ExprScopes, ScopeId},
BodySourceMap, BodySourceMap,
}, },
expr::{ExprId, PatId}, expr::{ExprId, PatId},
path::known, nameres::ModuleSource,
path::path,
resolver::{self, resolver_for_scope, HasResolver, Resolver, TypeNs, ValueNs}, resolver::{self, resolver_for_scope, HasResolver, Resolver, TypeNs, ValueNs},
AssocItemId, DefWithBodyId, AssocItemId, DefWithBodyId,
}; };
use hir_expand::{ use hir_expand::{
hygiene::Hygiene, name::AsName, AstId, HirFileId, MacroCallId, MacroFileKind, Source, hygiene::Hygiene, name::AsName, AstId, HirFileId, InFile, MacroCallId, MacroCallKind,
}; };
use hir_ty::{
method_resolution::{self, implements_trait},
Canonical, InEnvironment, InferenceResult, TraitEnvironment, Ty,
};
use ra_prof::profile;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
match_ast, AstPtr, match_ast, AstPtr,
@ -28,16 +35,12 @@ use ra_syntax::{
}; };
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase, Adt, AssocItem, Const, DefWithBody, Enum, EnumVariant, FromSource, Function,
ty::{ ImplBlock, Local, MacroDef, Name, Path, ScopeDef, Static, Struct, Trait, Type, TypeAlias,
method_resolution::{self, implements_trait}, TypeParam,
InEnvironment, TraitEnvironment, Ty,
},
Adt, AssocItem, Const, DefWithBody, Either, Enum, EnumVariant, FromSource, Function,
GenericParam, Local, MacroDef, Name, Path, ScopeDef, Static, Struct, Trait, Type, TypeAlias,
}; };
fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> { fn try_get_resolver_for_node(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> Option<Resolver> {
match_ast! { match_ast! {
match (node.value) { match (node.value) {
ast::Module(it) => { ast::Module(it) => {
@ -45,7 +48,7 @@ fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -
Some(crate::Module::from_declaration(db, src)?.id.resolver(db)) Some(crate::Module::from_declaration(db, src)?.id.resolver(db))
}, },
ast::SourceFile(it) => { ast::SourceFile(it) => {
let src = node.with_value(crate::ModuleSource::SourceFile(it)); let src = node.with_value(ModuleSource::SourceFile(it));
Some(crate::Module::from_definition(db, src)?.id.resolver(db)) Some(crate::Module::from_definition(db, src)?.id.resolver(db))
}, },
ast::StructDef(it) => { ast::StructDef(it) => {
@ -56,6 +59,14 @@ fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -
let src = node.with_value(it); let src = node.with_value(it);
Some(Enum::from_source(db, src)?.id.resolver(db)) Some(Enum::from_source(db, src)?.id.resolver(db))
}, },
ast::ImplBlock(it) => {
let src = node.with_value(it);
Some(ImplBlock::from_source(db, src)?.id.resolver(db))
},
ast::TraitDef(it) => {
let src = node.with_value(it);
Some(Trait::from_source(db, src)?.id.resolver(db))
},
_ => match node.value.kind() { _ => match node.value.kind() {
FN_DEF | CONST_DEF | STATIC_DEF => { FN_DEF | CONST_DEF | STATIC_DEF => {
let def = def_with_body_from_child_node(db, node)?; let def = def_with_body_from_child_node(db, node)?;
@ -71,14 +82,16 @@ fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -
fn def_with_body_from_child_node( fn def_with_body_from_child_node(
db: &impl HirDatabase, db: &impl HirDatabase,
child: Source<&SyntaxNode>, child: InFile<&SyntaxNode>,
) -> Option<DefWithBody> { ) -> Option<DefWithBody> {
child.value.ancestors().find_map(|node| { let _p = profile("def_with_body_from_child_node");
child.cloned().ancestors_with_macros(db).find_map(|node| {
let n = &node.value;
match_ast! { match_ast! {
match node { match n {
ast::FnDef(def) => { return Function::from_source(db, child.with_value(def)).map(DefWithBody::from); }, ast::FnDef(def) => { return Function::from_source(db, node.with_value(def)).map(DefWithBody::from); },
ast::ConstDef(def) => { return Const::from_source(db, child.with_value(def)).map(DefWithBody::from); }, ast::ConstDef(def) => { return Const::from_source(db, node.with_value(def)).map(DefWithBody::from); },
ast::StaticDef(def) => { return Static::from_source(db, child.with_value(def)).map(DefWithBody::from); }, ast::StaticDef(def) => { return Static::from_source(db, node.with_value(def)).map(DefWithBody::from); },
_ => { None }, _ => { None },
} }
} }
@ -93,7 +106,7 @@ pub struct SourceAnalyzer {
resolver: Resolver, resolver: Resolver,
body_owner: Option<DefWithBody>, body_owner: Option<DefWithBody>,
body_source_map: Option<Arc<BodySourceMap>>, body_source_map: Option<Arc<BodySourceMap>>,
infer: Option<Arc<crate::ty::InferenceResult>>, infer: Option<Arc<InferenceResult>>,
scopes: Option<Arc<ExprScopes>>, scopes: Option<Arc<ExprScopes>>,
} }
@ -104,7 +117,7 @@ pub enum PathResolution {
/// A local binding (only value namespace) /// A local binding (only value namespace)
Local(Local), Local(Local),
/// A generic parameter /// A generic parameter
GenericParam(GenericParam), TypeParam(TypeParam),
SelfType(crate::ImplBlock), SelfType(crate::ImplBlock),
Macro(MacroDef), Macro(MacroDef),
AssocItem(crate::AssocItem), AssocItem(crate::AssocItem),
@ -132,8 +145,8 @@ pub struct ReferenceDescriptor {
pub name: String, pub name: String,
} }
#[derive(Debug)]
pub struct Expansion { pub struct Expansion {
macro_file_kind: MacroFileKind,
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
} }
@ -141,23 +154,24 @@ impl Expansion {
pub fn map_token_down( pub fn map_token_down(
&self, &self,
db: &impl HirDatabase, db: &impl HirDatabase,
token: Source<&SyntaxToken>, token: InFile<&SyntaxToken>,
) -> Option<Source<SyntaxToken>> { ) -> Option<InFile<SyntaxToken>> {
let exp_info = self.file_id().expansion_info(db)?; let exp_info = self.file_id().expansion_info(db)?;
exp_info.map_token_down(token) exp_info.map_token_down(token)
} }
pub fn file_id(&self) -> HirFileId { pub fn file_id(&self) -> HirFileId {
self.macro_call_id.as_file(self.macro_file_kind) self.macro_call_id.as_file()
} }
} }
impl SourceAnalyzer { impl SourceAnalyzer {
pub fn new( pub fn new(
db: &impl HirDatabase, db: &impl HirDatabase,
node: Source<&SyntaxNode>, node: InFile<&SyntaxNode>,
offset: Option<TextUnit>, offset: Option<TextUnit>,
) -> SourceAnalyzer { ) -> SourceAnalyzer {
let _p = profile("SourceAnalyzer::new");
let def_with_body = def_with_body_from_child_node(db, node); let def_with_body = def_with_body_from_child_node(db, node);
if let Some(def) = def_with_body { if let Some(def) = def_with_body {
let (_body, source_map) = db.body_with_source_map(def.into()); let (_body, source_map) = db.body_with_source_map(def.into());
@ -192,12 +206,12 @@ impl SourceAnalyzer {
} }
fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> { fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> {
let src = Source { file_id: self.file_id, value: expr }; let src = InFile { file_id: self.file_id, value: expr };
self.body_source_map.as_ref()?.node_expr(src) self.body_source_map.as_ref()?.node_expr(src)
} }
fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> { fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
let src = Source { file_id: self.file_id, value: pat }; let src = InFile { file_id: self.file_id, value: pat };
self.body_source_map.as_ref()?.node_pat(src) self.body_source_map.as_ref()?.node_pat(src)
} }
@ -226,7 +240,13 @@ impl SourceAnalyzer {
} }
pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<crate::StructField> { pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<crate::StructField> {
let expr_id = self.expr_id(&field.expr()?)?; let expr_id = match field.expr() {
Some(it) => self.expr_id(&it)?,
None => {
let src = InFile { file_id: self.file_id, value: field };
self.body_source_map.as_ref()?.field_init_shorthand_expr(src)?
}
};
self.infer.as_ref()?.record_field_resolution(expr_id).map(|it| it.into()) self.infer.as_ref()?.record_field_resolution(expr_id).map(|it| it.into())
} }
@ -243,11 +263,11 @@ impl SourceAnalyzer {
pub fn resolve_macro_call( pub fn resolve_macro_call(
&self, &self,
db: &impl HirDatabase, db: &impl HirDatabase,
macro_call: Source<&ast::MacroCall>, macro_call: InFile<&ast::MacroCall>,
) -> Option<MacroDef> { ) -> Option<MacroDef> {
let hygiene = Hygiene::new(db, macro_call.file_id); let hygiene = Hygiene::new(db, macro_call.file_id);
let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &hygiene))?; let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &hygiene))?;
self.resolver.resolve_path_as_macro(db, &path).map(|it| it.into()) self.resolver.resolve_path_as_macro(db, path.mod_path()).map(|it| it.into())
} }
pub fn resolve_hir_path( pub fn resolve_hir_path(
@ -255,43 +275,42 @@ impl SourceAnalyzer {
db: &impl HirDatabase, db: &impl HirDatabase,
path: &crate::Path, path: &crate::Path,
) -> Option<PathResolution> { ) -> Option<PathResolution> {
let types = self.resolver.resolve_path_in_type_ns_fully(db, &path).map(|ty| match ty { let types =
TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), self.resolver.resolve_path_in_type_ns_fully(db, path.mod_path()).map(|ty| match ty {
TypeNs::GenericParam(idx) => PathResolution::GenericParam(GenericParam { TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
parent: self.resolver.generic_def().unwrap(), TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }),
idx, TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
}), PathResolution::Def(Adt::from(it).into())
TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
PathResolution::Def(Adt::from(it).into())
}
TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
TypeNs::BuiltinType(it) => PathResolution::Def(it.into()),
TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
});
let values = self.resolver.resolve_path_in_value_ns_fully(db, &path).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(pat_id) => {
let var = Local { parent: self.body_owner?, pat_id };
PathResolution::Local(var)
} }
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), TypeNs::BuiltinType(it) => PathResolution::Def(it.into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), });
}; let values =
Some(res) self.resolver.resolve_path_in_value_ns_fully(db, path.mod_path()).and_then(|val| {
}); let res = match val {
ValueNs::LocalBinding(pat_id) => {
let var = Local { parent: self.body_owner?, pat_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
};
Some(res)
});
let items = self let items = self
.resolver .resolver
.resolve_module_path(db, &path) .resolve_module_path_in_items(db, path.mod_path())
.take_types() .take_types()
.map(|it| PathResolution::Def(it.into())); .map(|it| PathResolution::Def(it.into()));
types.or(values).or(items).or_else(|| { types.or(values).or(items).or_else(|| {
self.resolver self.resolver
.resolve_path_as_macro(db, &path) .resolve_path_as_macro(db, path.mod_path())
.map(|def| PathResolution::Macro(def.into())) .map(|def| PathResolution::Macro(def.into()))
}) })
} }
@ -318,7 +337,7 @@ impl SourceAnalyzer {
let name = name_ref.as_name(); let name = name_ref.as_name();
let source_map = self.body_source_map.as_ref()?; let source_map = self.body_source_map.as_ref()?;
let scopes = self.scopes.as_ref()?; let scopes = self.scopes.as_ref()?;
let scope = scope_for(scopes, source_map, Source::new(self.file_id, name_ref.syntax()))?; let scope = scope_for(scopes, source_map, InFile::new(self.file_id, name_ref.syntax()))?;
let entry = scopes.resolve_name_in_scope(scope, &name)?; let entry = scopes.resolve_name_in_scope(scope, &name)?;
Some(ScopeEntryWithSyntax { Some(ScopeEntryWithSyntax {
name: entry.name().clone(), name: entry.name().clone(),
@ -332,10 +351,7 @@ impl SourceAnalyzer {
resolver::ScopeDef::PerNs(it) => it.into(), resolver::ScopeDef::PerNs(it) => it.into(),
resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()), resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()), resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
resolver::ScopeDef::GenericParam(idx) => { resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
let parent = self.resolver.generic_def().unwrap();
ScopeDef::GenericParam(GenericParam { parent, idx })
}
resolver::ScopeDef::Local(pat_id) => { resolver::ScopeDef::Local(pat_id) => {
let parent = self.resolver.body_owner().unwrap().into(); let parent = self.resolver.body_owner().unwrap().into();
ScopeDef::Local(Local { parent, pat_id }) ScopeDef::Local(Local { parent, pat_id })
@ -349,7 +365,7 @@ impl SourceAnalyzer {
// should switch to general reference search infra there. // should switch to general reference search infra there.
pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> { pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
let ptr = Either::A(AstPtr::new(&ast::Pat::from(pat.clone()))); let ptr = Either::Left(AstPtr::new(&ast::Pat::from(pat.clone())));
fn_def fn_def
.syntax() .syntax()
.descendants() .descendants()
@ -375,7 +391,7 @@ impl SourceAnalyzer {
// There should be no inference vars in types passed here // There should be no inference vars in types passed here
// FIXME check that? // FIXME check that?
// FIXME replace Unknown by bound vars here // FIXME replace Unknown by bound vars here
let canonical = crate::ty::Canonical { value: ty.ty.value.clone(), num_vars: 0 }; let canonical = Canonical { value: ty.ty.value.clone(), num_vars: 0 };
method_resolution::iterate_method_candidates( method_resolution::iterate_method_candidates(
&canonical, &canonical,
db, db,
@ -399,7 +415,7 @@ impl SourceAnalyzer {
// There should be no inference vars in types passed here // There should be no inference vars in types passed here
// FIXME check that? // FIXME check that?
// FIXME replace Unknown by bound vars here // FIXME replace Unknown by bound vars here
let canonical = crate::ty::Canonical { value: ty.ty.value.clone(), num_vars: 0 }; let canonical = Canonical { value: ty.ty.value.clone(), num_vars: 0 };
method_resolution::iterate_method_candidates( method_resolution::iterate_method_candidates(
&canonical, &canonical,
db, db,
@ -410,24 +426,10 @@ impl SourceAnalyzer {
) )
} }
// pub fn autoderef<'a>(
// &'a self,
// db: &'a impl HirDatabase,
// ty: Ty,
// ) -> impl Iterator<Item = Ty> + 'a {
// // There should be no inference vars in types passed here
// // FIXME check that?
// let canonical = crate::ty::Canonical { value: ty, num_vars: 0 };
// let krate = self.resolver.krate();
// let environment = TraitEnvironment::lower(db, &self.resolver);
// let ty = crate::ty::InEnvironment { value: canonical, environment };
// crate::ty::autoderef(db, krate, ty).map(|canonical| canonical.value)
// }
/// Checks that particular type `ty` implements `std::future::Future`. /// Checks that particular type `ty` implements `std::future::Future`.
/// This function is used in `.await` syntax completion. /// This function is used in `.await` syntax completion.
pub fn impls_future(&self, db: &impl HirDatabase, ty: Ty) -> bool { pub fn impls_future(&self, db: &impl HirDatabase, ty: Type) -> bool {
let std_future_path = known::std_future_future(); let std_future_path = path![std::future::Future];
let std_future_trait = match self.resolver.resolve_known_trait(db, &std_future_path) { let std_future_trait = match self.resolver.resolve_known_trait(db, &std_future_path) {
Some(it) => it.into(), Some(it) => it.into(),
@ -439,43 +441,40 @@ impl SourceAnalyzer {
_ => return false, _ => return false,
}; };
let canonical_ty = crate::ty::Canonical { value: ty, num_vars: 0 }; let canonical_ty = Canonical { value: ty.ty.value, num_vars: 0 };
implements_trait(&canonical_ty, db, &self.resolver, krate.into(), std_future_trait) implements_trait(&canonical_ty, db, &self.resolver, krate.into(), std_future_trait)
} }
pub fn expand( pub fn expand(
&self, &self,
db: &impl HirDatabase, db: &impl HirDatabase,
macro_call: Source<&ast::MacroCall>, macro_call: InFile<&ast::MacroCall>,
) -> Option<Expansion> { ) -> Option<Expansion> {
let def = self.resolve_macro_call(db, macro_call)?.id; let def = self.resolve_macro_call(db, macro_call)?.id;
let ast_id = AstId::new( let ast_id = AstId::new(
macro_call.file_id, macro_call.file_id,
db.ast_id_map(macro_call.file_id).ast_id(macro_call.value), db.ast_id_map(macro_call.file_id).ast_id(macro_call.value),
); );
Some(Expansion { Some(Expansion { macro_call_id: def.as_call_id(db, MacroCallKind::FnLike(ast_id)) })
macro_call_id: def.as_call_id(db, ast_id),
macro_file_kind: to_macro_file_kind(macro_call.value),
})
} }
} }
fn scope_for( fn scope_for(
scopes: &ExprScopes, scopes: &ExprScopes,
source_map: &BodySourceMap, source_map: &BodySourceMap,
node: Source<&SyntaxNode>, node: InFile<&SyntaxNode>,
) -> Option<ScopeId> { ) -> Option<ScopeId> {
node.value node.value
.ancestors() .ancestors()
.filter_map(ast::Expr::cast) .filter_map(ast::Expr::cast)
.filter_map(|it| source_map.node_expr(Source::new(node.file_id, &it))) .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it)))
.find_map(|it| scopes.scope_for(it)) .find_map(|it| scopes.scope_for(it))
} }
fn scope_for_offset( fn scope_for_offset(
scopes: &ExprScopes, scopes: &ExprScopes,
source_map: &BodySourceMap, source_map: &BodySourceMap,
offset: Source<TextUnit>, offset: InFile<TextUnit>,
) -> Option<ScopeId> { ) -> Option<ScopeId> {
scopes scopes
.scope_by_expr() .scope_by_expr()
@ -540,35 +539,3 @@ fn adjust(
}) })
.map(|(_ptr, scope)| *scope) .map(|(_ptr, scope)| *scope)
} }
/// Given a `ast::MacroCall`, return what `MacroKindFile` it belongs to.
/// FIXME: Not completed
fn to_macro_file_kind(macro_call: &ast::MacroCall) -> MacroFileKind {
let syn = macro_call.syntax();
let parent = match syn.parent() {
Some(it) => it,
None => {
// FIXME:
// If it is root, which means the parent HirFile
// MacroKindFile must be non-items
// return expr now.
return MacroFileKind::Expr;
}
};
match parent.kind() {
MACRO_ITEMS | SOURCE_FILE => MacroFileKind::Items,
LET_STMT => {
// FIXME: Handle Pattern
MacroFileKind::Expr
}
EXPR_STMT => MacroFileKind::Statements,
BLOCK => MacroFileKind::Statements,
ARG_LIST => MacroFileKind::Expr,
TRY_EXPR => MacroFileKind::Expr,
_ => {
// Unknown , Just guess it is `Items`
MacroFileKind::Items
}
}
}

View file

@ -1,4 +0,0 @@
//! The type system. We currently use this to infer types for completion, hover
//! information and various assists.
pub use hir_ty::*;

View file

@ -11,6 +11,9 @@ doctest = false
log = "0.4.5" log = "0.4.5"
once_cell = "1.0.1" once_cell = "1.0.1"
rustc-hash = "1.0" rustc-hash = "1.0"
either = "1.5"
anymap = "0.12"
drop_bomb = "0.1.4"
ra_arena = { path = "../ra_arena" } ra_arena = { path = "../ra_arena" }
ra_db = { path = "../ra_db" } ra_db = { path = "../ra_db" }

View file

@ -2,17 +2,18 @@
use std::sync::Arc; use std::sync::Arc;
use either::Either;
use hir_expand::{ use hir_expand::{
either::Either,
name::{AsName, Name}, name::{AsName, Name},
Source, InFile,
}; };
use ra_arena::{map::ArenaMap, Arena}; use ra_arena::{map::ArenaMap, Arena};
use ra_prof::profile;
use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
use crate::{ use crate::{
db::DefDatabase, trace::Trace, type_ref::TypeRef, AstItemDef, EnumId, HasChildSource, db::DefDatabase, src::HasChildSource, src::HasSource, trace::Trace, type_ref::TypeRef, EnumId,
LocalEnumVariantId, LocalStructFieldId, StructId, UnionId, VariantId, LocalEnumVariantId, LocalStructFieldId, Lookup, StructId, UnionId, VariantId,
}; };
/// Note that we use `StructData` for unions as well! /// Note that we use `StructData` for unions as well!
@ -50,14 +51,14 @@ pub struct StructFieldData {
impl StructData { impl StructData {
pub(crate) fn struct_data_query(db: &impl DefDatabase, id: StructId) -> Arc<StructData> { pub(crate) fn struct_data_query(db: &impl DefDatabase, id: StructId) -> Arc<StructData> {
let src = id.source(db); let src = id.lookup(db).source(db);
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
let variant_data = VariantData::new(src.value.kind()); let variant_data = VariantData::new(src.value.kind());
let variant_data = Arc::new(variant_data); let variant_data = Arc::new(variant_data);
Arc::new(StructData { name, variant_data }) Arc::new(StructData { name, variant_data })
} }
pub(crate) fn union_data_query(db: &impl DefDatabase, id: UnionId) -> Arc<StructData> { pub(crate) fn union_data_query(db: &impl DefDatabase, id: UnionId) -> Arc<StructData> {
let src = id.source(db); let src = id.lookup(db).source(db);
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
let variant_data = VariantData::new( let variant_data = VariantData::new(
src.value src.value
@ -72,7 +73,8 @@ impl StructData {
impl EnumData { impl EnumData {
pub(crate) fn enum_data_query(db: &impl DefDatabase, e: EnumId) -> Arc<EnumData> { pub(crate) fn enum_data_query(db: &impl DefDatabase, e: EnumId) -> Arc<EnumData> {
let src = e.source(db); let _p = profile("enum_data_query");
let src = e.lookup(db).source(db);
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
let mut trace = Trace::new_for_arena(); let mut trace = Trace::new_for_arena();
lower_enum(&mut trace, &src.value); lower_enum(&mut trace, &src.value);
@ -88,8 +90,8 @@ impl EnumData {
impl HasChildSource for EnumId { impl HasChildSource for EnumId {
type ChildId = LocalEnumVariantId; type ChildId = LocalEnumVariantId;
type Value = ast::EnumVariant; type Value = ast::EnumVariant;
fn child_source(&self, db: &impl DefDatabase) -> Source<ArenaMap<Self::ChildId, Self::Value>> { fn child_source(&self, db: &impl DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>> {
let src = self.source(db); let src = self.lookup(db).source(db);
let mut trace = Trace::new_for_map(); let mut trace = Trace::new_for_map();
lower_enum(&mut trace, &src.value); lower_enum(&mut trace, &src.value);
src.with_value(trace.into_map()) src.with_value(trace.into_map())
@ -145,7 +147,7 @@ impl HasChildSource for VariantId {
type ChildId = LocalStructFieldId; type ChildId = LocalStructFieldId;
type Value = Either<ast::TupleFieldDef, ast::RecordFieldDef>; type Value = Either<ast::TupleFieldDef, ast::RecordFieldDef>;
fn child_source(&self, db: &impl DefDatabase) -> Source<ArenaMap<Self::ChildId, Self::Value>> { fn child_source(&self, db: &impl DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>> {
let src = match self { let src = match self {
VariantId::EnumVariantId(it) => { VariantId::EnumVariantId(it) => {
// I don't really like the fact that we call into parent source // I don't really like the fact that we call into parent source
@ -153,8 +155,8 @@ impl HasChildSource for VariantId {
let src = it.parent.child_source(db); let src = it.parent.child_source(db);
src.map(|map| map[it.local_id].kind()) src.map(|map| map[it.local_id].kind())
} }
VariantId::StructId(it) => it.source(db).map(|it| it.kind()), VariantId::StructId(it) => it.lookup(db).source(db).map(|it| it.kind()),
VariantId::UnionId(it) => it.source(db).map(|it| { VariantId::UnionId(it) => it.lookup(db).source(db).map(|it| {
it.record_field_def_list() it.record_field_def_list()
.map(ast::StructKind::Record) .map(ast::StructKind::Record)
.unwrap_or(ast::StructKind::Unit) .unwrap_or(ast::StructKind::Unit)
@ -184,7 +186,7 @@ fn lower_struct(
ast::StructKind::Tuple(fl) => { ast::StructKind::Tuple(fl) => {
for (i, fd) in fl.fields().enumerate() { for (i, fd) in fl.fields().enumerate() {
trace.alloc( trace.alloc(
|| Either::A(fd.clone()), || Either::Left(fd.clone()),
|| StructFieldData { || StructFieldData {
name: Name::new_tuple_field(i), name: Name::new_tuple_field(i),
type_ref: TypeRef::from_ast_opt(fd.type_ref()), type_ref: TypeRef::from_ast_opt(fd.type_ref()),
@ -196,7 +198,7 @@ fn lower_struct(
ast::StructKind::Record(fl) => { ast::StructKind::Record(fl) => {
for fd in fl.fields() { for fd in fl.fields() {
trace.alloc( trace.alloc(
|| Either::B(fd.clone()), || Either::Right(fd.clone()),
|| StructFieldData { || StructFieldData {
name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing), name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing),
type_ref: TypeRef::from_ast_opt(fd.ascribed_type()), type_ref: TypeRef::from_ast_opt(fd.ascribed_type()),

View file

@ -2,7 +2,8 @@
use std::{ops, sync::Arc}; use std::{ops, sync::Arc};
use hir_expand::{either::Either, hygiene::Hygiene, AstId, Source}; use either::Either;
use hir_expand::{hygiene::Hygiene, AstId, InFile};
use mbe::ast_to_token_tree; use mbe::ast_to_token_tree;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, AttrsOwner}, ast::{self, AstNode, AttrsOwner},
@ -11,7 +12,7 @@ use ra_syntax::{
use tt::Subtree; use tt::Subtree;
use crate::{ use crate::{
db::DefDatabase, path::Path, AdtId, AstItemDef, AttrDefId, HasChildSource, HasSource, Lookup, db::DefDatabase, path::ModPath, src::HasChildSource, src::HasSource, AdtId, AttrDefId, Lookup,
}; };
#[derive(Default, Debug, Clone, PartialEq, Eq)] #[derive(Default, Debug, Clone, PartialEq, Eq)]
@ -44,8 +45,8 @@ impl Attrs {
AttrDefId::StructFieldId(it) => { AttrDefId::StructFieldId(it) => {
let src = it.parent.child_source(db); let src = it.parent.child_source(db);
match &src.value[it.local_id] { match &src.value[it.local_id] {
Either::A(_tuple) => Attrs::default(), Either::Left(_tuple) => Attrs::default(),
Either::B(record) => Attrs::from_attrs_owner(db, src.with_value(record)), Either::Right(record) => Attrs::from_attrs_owner(db, src.with_value(record)),
} }
} }
AttrDefId::EnumVariantId(var_id) => { AttrDefId::EnumVariantId(var_id) => {
@ -54,13 +55,15 @@ impl Attrs {
Attrs::from_attrs_owner(db, src.map(|it| it as &dyn AttrsOwner)) Attrs::from_attrs_owner(db, src.map(|it| it as &dyn AttrsOwner))
} }
AttrDefId::AdtId(it) => match it { AttrDefId::AdtId(it) => match it {
AdtId::StructId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db), AdtId::StructId(it) => attrs_from_loc(it.lookup(db), db),
AdtId::EnumId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db), AdtId::EnumId(it) => attrs_from_loc(it.lookup(db), db),
AdtId::UnionId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db), AdtId::UnionId(it) => attrs_from_loc(it.lookup(db), db),
}, },
AttrDefId::TraitId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db), AttrDefId::TraitId(it) => attrs_from_loc(it.lookup(db), db),
AttrDefId::MacroDefId(it) => attrs_from_ast(it.ast_id, db), AttrDefId::MacroDefId(it) => {
AttrDefId::ImplId(it) => attrs_from_ast(it.lookup_intern(db).ast_id, db), it.ast_id.map_or_else(Default::default, |ast_id| attrs_from_ast(ast_id, db))
}
AttrDefId::ImplId(it) => attrs_from_loc(it.lookup(db), db),
AttrDefId::ConstId(it) => attrs_from_loc(it.lookup(db), db), AttrDefId::ConstId(it) => attrs_from_loc(it.lookup(db), db),
AttrDefId::StaticId(it) => attrs_from_loc(it.lookup(db), db), AttrDefId::StaticId(it) => attrs_from_loc(it.lookup(db), db),
AttrDefId::FunctionId(it) => attrs_from_loc(it.lookup(db), db), AttrDefId::FunctionId(it) => attrs_from_loc(it.lookup(db), db),
@ -68,7 +71,7 @@ impl Attrs {
} }
} }
fn from_attrs_owner(db: &impl DefDatabase, owner: Source<&dyn AttrsOwner>) -> Attrs { fn from_attrs_owner(db: &impl DefDatabase, owner: InFile<&dyn AttrsOwner>) -> Attrs {
let hygiene = Hygiene::new(db, owner.file_id); let hygiene = Hygiene::new(db, owner.file_id);
Attrs::new(owner.value, &hygiene) Attrs::new(owner.value, &hygiene)
} }
@ -91,7 +94,7 @@ impl Attrs {
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct Attr { pub struct Attr {
pub(crate) path: Path, pub(crate) path: ModPath,
pub(crate) input: Option<AttrInput>, pub(crate) input: Option<AttrInput>,
} }
@ -103,7 +106,7 @@ pub enum AttrInput {
impl Attr { impl Attr {
fn from_src(ast: ast::Attr, hygiene: &Hygiene) -> Option<Attr> { fn from_src(ast: ast::Attr, hygiene: &Hygiene) -> Option<Attr> {
let path = Path::from_src(ast.path()?, hygiene)?; let path = ModPath::from_src(ast.path()?, hygiene)?;
let input = match ast.input() { let input = match ast.input() {
None => None, None => None,
Some(ast::AttrInput::Literal(lit)) => { Some(ast::AttrInput::Literal(lit)) => {
@ -157,7 +160,7 @@ where
N: ast::AttrsOwner, N: ast::AttrsOwner,
D: DefDatabase, D: DefDatabase,
{ {
let src = Source::new(src.file_id(), src.to_node(db)); let src = InFile::new(src.file_id, src.to_node(db));
Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner)) Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner))
} }

View file

@ -3,58 +3,75 @@
mod lower; mod lower;
pub mod scope; pub mod scope;
use std::{ops::Index, sync::Arc}; use std::{mem, ops::Index, sync::Arc};
use drop_bomb::DropBomb;
use either::Either;
use hir_expand::{ use hir_expand::{
either::Either, hygiene::Hygiene, AstId, HirFileId, MacroDefId, MacroFileKind, Source, ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroCallKind, MacroDefId,
}; };
use ra_arena::{map::ArenaMap, Arena}; use ra_arena::{map::ArenaMap, Arena};
use ra_prof::profile;
use ra_syntax::{ast, AstNode, AstPtr}; use ra_syntax::{ast, AstNode, AstPtr};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::{ use crate::{
db::DefDatabase, db::DefDatabase,
expr::{Expr, ExprId, Pat, PatId}, expr::{Expr, ExprId, Pat, PatId},
item_scope::BuiltinShadowMode,
item_scope::ItemScope,
nameres::CrateDefMap, nameres::CrateDefMap,
path::Path, path::{ModPath, Path},
DefWithBodyId, HasModule, HasSource, Lookup, ModuleId, src::HasSource,
DefWithBodyId, HasModule, Lookup, ModuleId,
}; };
struct Expander { pub(crate) struct Expander {
crate_def_map: Arc<CrateDefMap>, crate_def_map: Arc<CrateDefMap>,
current_file_id: HirFileId, current_file_id: HirFileId,
hygiene: Hygiene, hygiene: Hygiene,
ast_id_map: Arc<AstIdMap>,
module: ModuleId, module: ModuleId,
} }
impl Expander { impl Expander {
fn new(db: &impl DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander { pub(crate) fn new(
db: &impl DefDatabase,
current_file_id: HirFileId,
module: ModuleId,
) -> Expander {
let crate_def_map = db.crate_def_map(module.krate); let crate_def_map = db.crate_def_map(module.krate);
let hygiene = Hygiene::new(db, current_file_id); let hygiene = Hygiene::new(db, current_file_id);
Expander { crate_def_map, current_file_id, hygiene, module } let ast_id_map = db.ast_id_map(current_file_id);
Expander { crate_def_map, current_file_id, hygiene, ast_id_map, module }
} }
fn enter_expand( pub(crate) fn enter_expand<T: ast::AstNode, DB: DefDatabase>(
&mut self, &mut self,
db: &impl DefDatabase, db: &DB,
macro_call: ast::MacroCall, macro_call: ast::MacroCall,
) -> Option<(Mark, ast::Expr)> { ) -> Option<(Mark, T)> {
let ast_id = AstId::new( let ast_id = AstId::new(
self.current_file_id, self.current_file_id,
db.ast_id_map(self.current_file_id).ast_id(&macro_call), db.ast_id_map(self.current_file_id).ast_id(&macro_call),
); );
if let Some(path) = macro_call.path().and_then(|path| self.parse_path(path)) { if let Some(path) = macro_call.path().and_then(|path| self.parse_mod_path(path)) {
if let Some(def) = self.resolve_path_as_macro(db, &path) { if let Some(def) = self.resolve_path_as_macro(db, &path) {
let call_id = def.as_call_id(db, ast_id); let call_id = def.as_call_id(db, MacroCallKind::FnLike(ast_id));
let file_id = call_id.as_file(MacroFileKind::Expr); let file_id = call_id.as_file();
if let Some(node) = db.parse_or_expand(file_id) { if let Some(node) = db.parse_or_expand(file_id) {
if let Some(expr) = ast::Expr::cast(node) { if let Some(expr) = T::cast(node) {
log::debug!("macro expansion {:#?}", expr.syntax()); log::debug!("macro expansion {:#?}", expr.syntax());
let mark = Mark { file_id: self.current_file_id }; let mark = Mark {
file_id: self.current_file_id,
ast_id_map: mem::take(&mut self.ast_id_map),
bomb: DropBomb::new("expansion mark dropped"),
};
self.hygiene = Hygiene::new(db, file_id); self.hygiene = Hygiene::new(db, file_id);
self.current_file_id = file_id; self.current_file_id = file_id;
self.ast_id_map = db.ast_id_map(file_id);
return Some((mark, expr)); return Some((mark, expr));
} }
@ -67,35 +84,42 @@ impl Expander {
None None
} }
fn exit(&mut self, db: &impl DefDatabase, mark: Mark) { pub(crate) fn exit(&mut self, db: &impl DefDatabase, mut mark: Mark) {
self.hygiene = Hygiene::new(db, mark.file_id); self.hygiene = Hygiene::new(db, mark.file_id);
self.current_file_id = mark.file_id; self.current_file_id = mark.file_id;
std::mem::forget(mark); self.ast_id_map = mem::take(&mut mark.ast_id_map);
mark.bomb.defuse();
} }
fn to_source<T>(&self, value: T) -> Source<T> { pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
Source { file_id: self.current_file_id, value } InFile { file_id: self.current_file_id, value }
} }
fn parse_path(&mut self, path: ast::Path) -> Option<Path> { fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
Path::from_src(path, &self.hygiene) Path::from_src(path, &self.hygiene)
} }
fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &Path) -> Option<MacroDefId> { fn parse_mod_path(&mut self, path: ast::Path) -> Option<ModPath> {
self.crate_def_map.resolve_path(db, self.module.local_id, path).0.take_macros() ModPath::from_src(path, &self.hygiene)
}
fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &ModPath) -> Option<MacroDefId> {
self.crate_def_map
.resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other)
.0
.take_macros()
}
fn ast_id<N: AstNode>(&self, item: &N) -> AstId<N> {
let file_local_id = self.ast_id_map.ast_id(item);
AstId::new(self.current_file_id, file_local_id)
} }
} }
struct Mark { pub(crate) struct Mark {
file_id: HirFileId, file_id: HirFileId,
} ast_id_map: Arc<AstIdMap>,
bomb: DropBomb,
impl Drop for Mark {
fn drop(&mut self) {
if !std::thread::panicking() {
panic!("dropped mark")
}
}
} }
/// The body of an item (function, const etc.). /// The body of an item (function, const etc.).
@ -112,13 +136,14 @@ pub struct Body {
pub params: Vec<PatId>, pub params: Vec<PatId>,
/// The `ExprId` of the actual body expression. /// The `ExprId` of the actual body expression.
pub body_expr: ExprId, pub body_expr: ExprId,
pub item_scope: ItemScope,
} }
pub type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>; pub type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>;
pub type ExprSource = Source<ExprPtr>; pub type ExprSource = InFile<ExprPtr>;
pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>; pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;
pub type PatSource = Source<PatPtr>; pub type PatSource = InFile<PatPtr>;
/// An item body together with the mapping from syntax nodes to HIR expression /// An item body together with the mapping from syntax nodes to HIR expression
/// IDs. This is needed to go from e.g. a position in a file to the HIR /// IDs. This is needed to go from e.g. a position in a file to the HIR
@ -145,6 +170,7 @@ impl Body {
db: &impl DefDatabase, db: &impl DefDatabase,
def: DefWithBodyId, def: DefWithBodyId,
) -> (Arc<Body>, Arc<BodySourceMap>) { ) -> (Arc<Body>, Arc<BodySourceMap>) {
let _p = profile("body_with_source_map_query");
let mut params = None; let mut params = None;
let (file_id, module, body) = match def { let (file_id, module, body) = match def {
@ -166,7 +192,7 @@ impl Body {
} }
}; };
let expander = Expander::new(db, file_id, module); let expander = Expander::new(db, file_id, module);
let (body, source_map) = Body::new(db, expander, params, body); let (body, source_map) = Body::new(db, def, expander, params, body);
(Arc::new(body), Arc::new(source_map)) (Arc::new(body), Arc::new(source_map))
} }
@ -176,11 +202,12 @@ impl Body {
fn new( fn new(
db: &impl DefDatabase, db: &impl DefDatabase,
def: DefWithBodyId,
expander: Expander, expander: Expander,
params: Option<ast::ParamList>, params: Option<ast::ParamList>,
body: Option<ast::Expr>, body: Option<ast::Expr>,
) -> (Body, BodySourceMap) { ) -> (Body, BodySourceMap) {
lower::lower(db, expander, params, body) lower::lower(db, def, expander, params, body)
} }
} }
@ -205,8 +232,13 @@ impl BodySourceMap {
self.expr_map_back.get(expr).copied() self.expr_map_back.get(expr).copied()
} }
pub fn node_expr(&self, node: Source<&ast::Expr>) -> Option<ExprId> { pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option<ExprId> {
let src = node.map(|it| Either::A(AstPtr::new(it))); let src = node.map(|it| Either::Left(AstPtr::new(it)));
self.expr_map.get(&src).cloned()
}
pub fn field_init_shorthand_expr(&self, node: InFile<&ast::RecordField>) -> Option<ExprId> {
let src = node.map(|it| Either::Right(AstPtr::new(it)));
self.expr_map.get(&src).cloned() self.expr_map.get(&src).cloned()
} }
@ -214,8 +246,8 @@ impl BodySourceMap {
self.pat_map_back.get(pat).copied() self.pat_map_back.get(pat).copied()
} }
pub fn node_pat(&self, node: Source<&ast::Pat>) -> Option<PatId> { pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
let src = node.map(|it| Either::A(AstPtr::new(it))); let src = node.map(|it| Either::Left(AstPtr::new(it)));
self.pat_map.get(&src).cloned() self.pat_map.get(&src).cloned()
} }

View file

@ -1,14 +1,13 @@
//! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr` //! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr`
//! representation. //! representation.
use hir_expand::{ use either::Either;
either::Either,
name::{self, AsName, Name}, use hir_expand::name::{name, AsName, Name};
};
use ra_arena::Arena; use ra_arena::Arena;
use ra_syntax::{ use ra_syntax::{
ast::{ ast::{
self, ArgListOwner, ArrayExprKind, LiteralKind, LoopBodyOwner, NameOwner, self, ArgListOwner, ArrayExprKind, LiteralKind, LoopBodyOwner, ModuleItemOwner, NameOwner,
TypeAscriptionOwner, TypeAscriptionOwner,
}, },
AstNode, AstPtr, AstNode, AstPtr,
@ -26,23 +25,28 @@ use crate::{
path::GenericArgs, path::GenericArgs,
path::Path, path::Path,
type_ref::{Mutability, TypeRef}, type_ref::{Mutability, TypeRef},
ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern, ModuleDefId, StaticLoc,
StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
}; };
pub(super) fn lower( pub(super) fn lower(
db: &impl DefDatabase, db: &impl DefDatabase,
def: DefWithBodyId,
expander: Expander, expander: Expander,
params: Option<ast::ParamList>, params: Option<ast::ParamList>,
body: Option<ast::Expr>, body: Option<ast::Expr>,
) -> (Body, BodySourceMap) { ) -> (Body, BodySourceMap) {
ExprCollector { ExprCollector {
expander,
db, db,
def,
expander,
source_map: BodySourceMap::default(), source_map: BodySourceMap::default(),
body: Body { body: Body {
exprs: Arena::default(), exprs: Arena::default(),
pats: Arena::default(), pats: Arena::default(),
params: Vec::new(), params: Vec::new(),
body_expr: ExprId::dummy(), body_expr: ExprId::dummy(),
item_scope: Default::default(),
}, },
} }
.collect(params, body) .collect(params, body)
@ -50,6 +54,7 @@ pub(super) fn lower(
struct ExprCollector<DB> { struct ExprCollector<DB> {
db: DB, db: DB,
def: DefWithBodyId,
expander: Expander, expander: Expander,
body: Body, body: Body,
@ -70,11 +75,11 @@ where
let ptr = AstPtr::new(&self_param); let ptr = AstPtr::new(&self_param);
let param_pat = self.alloc_pat( let param_pat = self.alloc_pat(
Pat::Bind { Pat::Bind {
name: name::SELF_PARAM, name: name![self],
mode: BindingAnnotation::Unannotated, mode: BindingAnnotation::Unannotated,
subpat: None, subpat: None,
}, },
Either::B(ptr), Either::Right(ptr),
); );
self.body.params.push(param_pat); self.body.params.push(param_pat);
} }
@ -94,7 +99,7 @@ where
} }
fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId { fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId {
let ptr = Either::A(ptr); let ptr = Either::Left(ptr);
let id = self.body.exprs.alloc(expr); let id = self.body.exprs.alloc(expr);
let src = self.expander.to_source(ptr); let src = self.expander.to_source(ptr);
self.source_map.expr_map.insert(src, id); self.source_map.expr_map.insert(src, id);
@ -107,7 +112,7 @@ where
self.body.exprs.alloc(expr) self.body.exprs.alloc(expr)
} }
fn alloc_expr_field_shorthand(&mut self, expr: Expr, ptr: AstPtr<ast::RecordField>) -> ExprId { fn alloc_expr_field_shorthand(&mut self, expr: Expr, ptr: AstPtr<ast::RecordField>) -> ExprId {
let ptr = Either::B(ptr); let ptr = Either::Right(ptr);
let id = self.body.exprs.alloc(expr); let id = self.body.exprs.alloc(expr);
let src = self.expander.to_source(ptr); let src = self.expander.to_source(ptr);
self.source_map.expr_map.insert(src, id); self.source_map.expr_map.insert(src, id);
@ -277,7 +282,7 @@ where
ast::Expr::ParenExpr(e) => { ast::Expr::ParenExpr(e) => {
let inner = self.collect_expr_opt(e.expr()); let inner = self.collect_expr_opt(e.expr());
// make the paren expr point to the inner expression as well // make the paren expr point to the inner expression as well
let src = self.expander.to_source(Either::A(syntax_ptr)); let src = self.expander.to_source(Either::Left(syntax_ptr));
self.source_map.expr_map.insert(src, inner); self.source_map.expr_map.insert(src, inner);
inner inner
} }
@ -367,8 +372,9 @@ where
arg_types.push(type_ref); arg_types.push(type_ref);
} }
} }
let ret_type = e.ret_type().and_then(|r| r.type_ref()).map(TypeRef::from_ast);
let body = self.collect_expr_opt(e.body()); let body = self.collect_expr_opt(e.body());
self.alloc_expr(Expr::Lambda { args, arg_types, body }, syntax_ptr) self.alloc_expr(Expr::Lambda { args, arg_types, ret_type, body }, syntax_ptr)
} }
ast::Expr::BinExpr(e) => { ast::Expr::BinExpr(e) => {
let lhs = self.collect_expr_opt(e.lhs()); let lhs = self.collect_expr_opt(e.lhs());
@ -429,10 +435,17 @@ where
let index = self.collect_expr_opt(e.index()); let index = self.collect_expr_opt(e.index());
self.alloc_expr(Expr::Index { base, index }, syntax_ptr) self.alloc_expr(Expr::Index { base, index }, syntax_ptr)
} }
ast::Expr::RangeExpr(e) => {
// FIXME implement HIR for these: let lhs = e.start().map(|lhs| self.collect_expr(lhs));
ast::Expr::Label(_e) => self.alloc_expr(Expr::Missing, syntax_ptr), let rhs = e.end().map(|rhs| self.collect_expr(rhs));
ast::Expr::RangeExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr), match e.op_kind() {
Some(range_type) => {
self.alloc_expr(Expr::Range { lhs, rhs, range_type }, syntax_ptr)
}
None => self.alloc_expr(Expr::Missing, syntax_ptr),
}
}
// FIXME expand to statements in statement position
ast::Expr::MacroCall(e) => match self.expander.enter_expand(self.db, e) { ast::Expr::MacroCall(e) => match self.expander.enter_expand(self.db, e) {
Some((mark, expansion)) => { Some((mark, expansion)) => {
let id = self.collect_expr(expansion); let id = self.collect_expr(expansion);
@ -441,6 +454,9 @@ where
} }
None => self.alloc_expr(Expr::Missing, syntax_ptr), None => self.alloc_expr(Expr::Missing, syntax_ptr),
}, },
// FIXME implement HIR for these:
ast::Expr::Label(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
} }
} }
@ -458,6 +474,7 @@ where
Some(block) => block, Some(block) => block,
None => return self.alloc_expr(Expr::Missing, syntax_node_ptr), None => return self.alloc_expr(Expr::Missing, syntax_node_ptr),
}; };
self.collect_block_items(&block);
let statements = block let statements = block
.statements() .statements()
.map(|s| match s { .map(|s| match s {
@ -474,6 +491,63 @@ where
self.alloc_expr(Expr::Block { statements, tail }, syntax_node_ptr) self.alloc_expr(Expr::Block { statements, tail }, syntax_node_ptr)
} }
fn collect_block_items(&mut self, block: &ast::Block) {
let container = ContainerId::DefWithBodyId(self.def);
for item in block.items() {
let (def, name): (ModuleDefId, Option<ast::Name>) = match item {
ast::ModuleItem::FnDef(def) => {
let ast_id = self.expander.ast_id(&def);
(
FunctionLoc { container: container.into(), ast_id }.intern(self.db).into(),
def.name(),
)
}
ast::ModuleItem::TypeAliasDef(def) => {
let ast_id = self.expander.ast_id(&def);
(
TypeAliasLoc { container: container.into(), ast_id }.intern(self.db).into(),
def.name(),
)
}
ast::ModuleItem::ConstDef(def) => {
let ast_id = self.expander.ast_id(&def);
(
ConstLoc { container: container.into(), ast_id }.intern(self.db).into(),
def.name(),
)
}
ast::ModuleItem::StaticDef(def) => {
let ast_id = self.expander.ast_id(&def);
(StaticLoc { container, ast_id }.intern(self.db).into(), def.name())
}
ast::ModuleItem::StructDef(def) => {
let ast_id = self.expander.ast_id(&def);
(StructLoc { container, ast_id }.intern(self.db).into(), def.name())
}
ast::ModuleItem::EnumDef(def) => {
let ast_id = self.expander.ast_id(&def);
(EnumLoc { container, ast_id }.intern(self.db).into(), def.name())
}
ast::ModuleItem::UnionDef(def) => {
let ast_id = self.expander.ast_id(&def);
(UnionLoc { container, ast_id }.intern(self.db).into(), def.name())
}
ast::ModuleItem::TraitDef(def) => {
let ast_id = self.expander.ast_id(&def);
(TraitLoc { container, ast_id }.intern(self.db).into(), def.name())
}
ast::ModuleItem::ImplBlock(_)
| ast::ModuleItem::UseItem(_)
| ast::ModuleItem::ExternCrateItem(_)
| ast::ModuleItem::Module(_) => continue,
};
self.body.item_scope.define_def(def);
if let Some(name) = name {
self.body.item_scope.push_res(name.as_name(), def.into());
}
}
}
fn collect_block_opt(&mut self, expr: Option<ast::BlockExpr>) -> ExprId { fn collect_block_opt(&mut self, expr: Option<ast::BlockExpr>) -> ExprId {
if let Some(block) = expr { if let Some(block) = expr {
self.collect_block(block) self.collect_block(block)
@ -541,7 +615,7 @@ where
ast::Pat::SlicePat(_) | ast::Pat::RangePat(_) => Pat::Missing, ast::Pat::SlicePat(_) | ast::Pat::RangePat(_) => Pat::Missing,
}; };
let ptr = AstPtr::new(&pat); let ptr = AstPtr::new(&pat);
self.alloc_pat(pattern, Either::A(ptr)) self.alloc_pat(pattern, Either::Left(ptr))
} }
fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId { fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId {

View file

@ -171,7 +171,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use hir_expand::{name::AsName, Source}; use hir_expand::{name::AsName, InFile};
use ra_db::{fixture::WithFixture, FileId, SourceDatabase}; use ra_db::{fixture::WithFixture, FileId, SourceDatabase};
use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
use test_utils::{assert_eq_text, covers, extract_offset}; use test_utils::{assert_eq_text, covers, extract_offset};
@ -183,8 +183,8 @@ mod tests {
let crate_def_map = db.crate_def_map(krate); let crate_def_map = db.crate_def_map(krate);
let module = crate_def_map.modules_for_file(file_id).next().unwrap(); let module = crate_def_map.modules_for_file(file_id).next().unwrap();
let (_, res) = crate_def_map[module].scope.entries().next().unwrap(); let (_, def) = crate_def_map[module].scope.entries().next().unwrap();
match res.def.take_values().unwrap() { match def.take_values().unwrap() {
ModuleDefId::FunctionId(it) => it, ModuleDefId::FunctionId(it) => it,
_ => panic!(), _ => panic!(),
} }
@ -211,7 +211,7 @@ mod tests {
let (_body, source_map) = db.body_with_source_map(function.into()); let (_body, source_map) = db.body_with_source_map(function.into());
let expr_id = source_map let expr_id = source_map
.node_expr(Source { file_id: file_id.into(), value: &marker.into() }) .node_expr(InFile { file_id: file_id.into(), value: &marker.into() })
.unwrap(); .unwrap();
let scope = scopes.scope_for(expr_id); let scope = scopes.scope_for(expr_id);
@ -318,7 +318,7 @@ mod tests {
let expr_scope = { let expr_scope = {
let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap(); let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
let expr_id = let expr_id =
source_map.node_expr(Source { file_id: file_id.into(), value: &expr_ast }).unwrap(); source_map.node_expr(InFile { file_id: file_id.into(), value: &expr_ast }).unwrap();
scopes.scope_for(expr_id).unwrap() scopes.scope_for(expr_id).unwrap()
}; };

View file

@ -5,7 +5,7 @@
use std::fmt; use std::fmt;
use hir_expand::name::{self, Name}; use hir_expand::name::{name, Name};
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Signedness { pub enum Signedness {
@ -52,26 +52,26 @@ pub enum BuiltinType {
impl BuiltinType { impl BuiltinType {
#[rustfmt::skip] #[rustfmt::skip]
pub const ALL: &'static [(Name, BuiltinType)] = &[ pub const ALL: &'static [(Name, BuiltinType)] = &[
(name::CHAR, BuiltinType::Char), (name![char], BuiltinType::Char),
(name::BOOL, BuiltinType::Bool), (name![bool], BuiltinType::Bool),
(name::STR, BuiltinType::Str ), (name![str], BuiltinType::Str),
(name::ISIZE, BuiltinType::Int(BuiltinInt::ISIZE)), (name![isize], BuiltinType::Int(BuiltinInt::ISIZE)),
(name::I8, BuiltinType::Int(BuiltinInt::I8)), (name![i8], BuiltinType::Int(BuiltinInt::I8)),
(name::I16, BuiltinType::Int(BuiltinInt::I16)), (name![i16], BuiltinType::Int(BuiltinInt::I16)),
(name::I32, BuiltinType::Int(BuiltinInt::I32)), (name![i32], BuiltinType::Int(BuiltinInt::I32)),
(name::I64, BuiltinType::Int(BuiltinInt::I64)), (name![i64], BuiltinType::Int(BuiltinInt::I64)),
(name::I128, BuiltinType::Int(BuiltinInt::I128)), (name![i128], BuiltinType::Int(BuiltinInt::I128)),
(name::USIZE, BuiltinType::Int(BuiltinInt::USIZE)), (name![usize], BuiltinType::Int(BuiltinInt::USIZE)),
(name::U8, BuiltinType::Int(BuiltinInt::U8)), (name![u8], BuiltinType::Int(BuiltinInt::U8)),
(name::U16, BuiltinType::Int(BuiltinInt::U16)), (name![u16], BuiltinType::Int(BuiltinInt::U16)),
(name::U32, BuiltinType::Int(BuiltinInt::U32)), (name![u32], BuiltinType::Int(BuiltinInt::U32)),
(name::U64, BuiltinType::Int(BuiltinInt::U64)), (name![u64], BuiltinType::Int(BuiltinInt::U64)),
(name::U128, BuiltinType::Int(BuiltinInt::U128)), (name![u128], BuiltinType::Int(BuiltinInt::U128)),
(name::F32, BuiltinType::Float(BuiltinFloat::F32)), (name![f32], BuiltinType::Float(BuiltinFloat::F32)),
(name::F64, BuiltinType::Float(BuiltinFloat::F64)), (name![f64], BuiltinType::Float(BuiltinFloat::F64)),
]; ];
} }

View file

@ -0,0 +1,177 @@
//! When *constructing* `hir`, we start at some parent syntax node and recursively
//! lower the children.
//!
//! This modules allows one to go in the opposite direction: start with a syntax
//! node for a *child*, and get its hir.
use either::Either;
use crate::{
db::DefDatabase,
dyn_map::DynMap,
item_scope::ItemScope,
keys,
src::{HasChildSource, HasSource},
AdtId, AssocItemId, DefWithBodyId, EnumId, EnumVariantId, ImplId, Lookup, ModuleDefId,
ModuleId, StructFieldId, TraitId, VariantId,
};
pub trait ChildBySource {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap;
}
impl ChildBySource for TraitId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap {
let mut res = DynMap::default();
let data = db.trait_data(*self);
for (_name, item) in data.items.iter() {
match *item {
AssocItemId::FunctionId(func) => {
let src = func.lookup(db).source(db);
res[keys::FUNCTION].insert(src, func)
}
AssocItemId::ConstId(konst) => {
let src = konst.lookup(db).source(db);
res[keys::CONST].insert(src, konst)
}
AssocItemId::TypeAliasId(ty) => {
let src = ty.lookup(db).source(db);
res[keys::TYPE_ALIAS].insert(src, ty)
}
}
}
res
}
}
impl ChildBySource for ImplId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap {
let mut res = DynMap::default();
let data = db.impl_data(*self);
for &item in data.items.iter() {
match item {
AssocItemId::FunctionId(func) => {
let src = func.lookup(db).source(db);
res[keys::FUNCTION].insert(src, func)
}
AssocItemId::ConstId(konst) => {
let src = konst.lookup(db).source(db);
res[keys::CONST].insert(src, konst)
}
AssocItemId::TypeAliasId(ty) => {
let src = ty.lookup(db).source(db);
res[keys::TYPE_ALIAS].insert(src, ty)
}
}
}
res
}
}
impl ChildBySource for ModuleId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap {
let crate_def_map = db.crate_def_map(self.krate);
let module_data = &crate_def_map[self.local_id];
module_data.scope.child_by_source(db)
}
}
impl ChildBySource for ItemScope {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap {
let mut res = DynMap::default();
self.declarations().for_each(|item| add_module_def(db, &mut res, item));
self.impls().for_each(|imp| add_impl(db, &mut res, imp));
return res;
fn add_module_def(db: &impl DefDatabase, map: &mut DynMap, item: ModuleDefId) {
match item {
ModuleDefId::FunctionId(func) => {
let src = func.lookup(db).source(db);
map[keys::FUNCTION].insert(src, func)
}
ModuleDefId::ConstId(konst) => {
let src = konst.lookup(db).source(db);
map[keys::CONST].insert(src, konst)
}
ModuleDefId::StaticId(statik) => {
let src = statik.lookup(db).source(db);
map[keys::STATIC].insert(src, statik)
}
ModuleDefId::TypeAliasId(ty) => {
let src = ty.lookup(db).source(db);
map[keys::TYPE_ALIAS].insert(src, ty)
}
ModuleDefId::TraitId(trait_) => {
let src = trait_.lookup(db).source(db);
map[keys::TRAIT].insert(src, trait_)
}
ModuleDefId::AdtId(adt) => match adt {
AdtId::StructId(strukt) => {
let src = strukt.lookup(db).source(db);
map[keys::STRUCT].insert(src, strukt)
}
AdtId::UnionId(union_) => {
let src = union_.lookup(db).source(db);
map[keys::UNION].insert(src, union_)
}
AdtId::EnumId(enum_) => {
let src = enum_.lookup(db).source(db);
map[keys::ENUM].insert(src, enum_)
}
},
_ => (),
}
}
fn add_impl(db: &impl DefDatabase, map: &mut DynMap, imp: ImplId) {
let src = imp.lookup(db).source(db);
map[keys::IMPL].insert(src, imp)
}
}
}
impl ChildBySource for VariantId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap {
let mut res = DynMap::default();
let arena_map = self.child_source(db);
let arena_map = arena_map.as_ref();
for (local_id, source) in arena_map.value.iter() {
let id = StructFieldId { parent: *self, local_id };
match source {
Either::Left(source) => {
res[keys::TUPLE_FIELD].insert(arena_map.with_value(source.clone()), id)
}
Either::Right(source) => {
res[keys::RECORD_FIELD].insert(arena_map.with_value(source.clone()), id)
}
}
}
res
}
}
impl ChildBySource for EnumId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap {
let mut res = DynMap::default();
let arena_map = self.child_source(db);
let arena_map = arena_map.as_ref();
for (local_id, source) in arena_map.value.iter() {
let id = EnumVariantId { parent: *self, local_id };
res[keys::ENUM_VARIANT].insert(arena_map.with_value(source.clone()), id)
}
res
}
}
impl ChildBySource for DefWithBodyId {
    /// Items nested inside a body are recorded in the body's `ItemScope`;
    /// delegate the mapping to it.
    fn child_by_source(&self, db: &impl DefDatabase) -> DynMap {
        db.body(*self).item_scope.child_by_source(db)
    }
}

View file

@ -3,16 +3,17 @@
use std::sync::Arc; use std::sync::Arc;
use hir_expand::{ use hir_expand::{
name::{self, AsName, Name}, name::{name, AsName, Name},
AstId, AstId, InFile,
}; };
use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; use ra_syntax::ast::{self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner};
use crate::{ use crate::{
db::DefDatabase, db::DefDatabase,
src::HasSource,
type_ref::{Mutability, TypeRef}, type_ref::{Mutability, TypeRef},
AssocItemId, AstItemDef, ConstId, ConstLoc, ContainerId, FunctionId, FunctionLoc, HasSource, AssocContainerId, AssocItemId, ConstId, ConstLoc, Expander, FunctionId, FunctionLoc, HasModule,
ImplId, Intern, Lookup, StaticId, TraitId, TypeAliasId, TypeAliasLoc, ImplId, Intern, Lookup, ModuleId, StaticId, TraitId, TypeAliasId, TypeAliasLoc,
}; };
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@ -36,7 +37,7 @@ impl FunctionData {
let self_type = if let Some(type_ref) = self_param.ascribed_type() { let self_type = if let Some(type_ref) = self_param.ascribed_type() {
TypeRef::from_ast(type_ref) TypeRef::from_ast(type_ref)
} else { } else {
let self_type = TypeRef::Path(name::SELF_TYPE.into()); let self_type = TypeRef::Path(name![Self].into());
match self_param.kind() { match self_param.kind() {
ast::SelfParamKind::Owned => self_type, ast::SelfParamKind::Owned => self_type,
ast::SelfParamKind::Ref => { ast::SelfParamKind::Ref => {
@ -93,12 +94,12 @@ pub struct TraitData {
impl TraitData { impl TraitData {
pub(crate) fn trait_data_query(db: &impl DefDatabase, tr: TraitId) -> Arc<TraitData> { pub(crate) fn trait_data_query(db: &impl DefDatabase, tr: TraitId) -> Arc<TraitData> {
let src = tr.source(db); let src = tr.lookup(db).source(db);
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
let auto = src.value.is_auto(); let auto = src.value.is_auto();
let ast_id_map = db.ast_id_map(src.file_id); let ast_id_map = db.ast_id_map(src.file_id);
let container = ContainerId::TraitId(tr); let container = AssocContainerId::TraitId(tr);
let items = if let Some(item_list) = src.value.item_list() { let items = if let Some(item_list) = src.value.item_list() {
item_list item_list
.impl_items() .impl_items()
@ -166,46 +167,24 @@ pub struct ImplData {
impl ImplData { impl ImplData {
pub(crate) fn impl_data_query(db: &impl DefDatabase, id: ImplId) -> Arc<ImplData> { pub(crate) fn impl_data_query(db: &impl DefDatabase, id: ImplId) -> Arc<ImplData> {
let src = id.source(db); let impl_loc = id.lookup(db);
let items = db.ast_id_map(src.file_id); let src = impl_loc.source(db);
let target_trait = src.value.target_trait().map(TypeRef::from_ast); let target_trait = src.value.target_trait().map(TypeRef::from_ast);
let target_type = TypeRef::from_ast_opt(src.value.target_type()); let target_type = TypeRef::from_ast_opt(src.value.target_type());
let is_negative = src.value.is_negative(); let is_negative = src.value.is_negative();
let module_id = impl_loc.container.module(db);
let items = if let Some(item_list) = src.value.item_list() { let mut items = Vec::new();
item_list if let Some(item_list) = src.value.item_list() {
.impl_items() items.extend(collect_impl_items(db, item_list.impl_items(), src.file_id, id));
.map(|item_node| match item_node { items.extend(collect_impl_items_in_macros(
ast::ImplItem::FnDef(it) => { db,
let def = FunctionLoc { module_id,
container: ContainerId::ImplId(id), &src.with_value(item_list),
ast_id: AstId::new(src.file_id, items.ast_id(&it)), id,
} ));
.intern(db); }
def.into()
}
ast::ImplItem::ConstDef(it) => {
let def = ConstLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
ast::ImplItem::TypeAliasDef(it) => {
let def = TypeAliasLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
})
.collect()
} else {
Vec::new()
};
let res = ImplData { target_trait, target_type, items, is_negative }; let res = ImplData { target_trait, target_type, items, is_negative };
Arc::new(res) Arc::new(res)
@ -236,3 +215,92 @@ impl ConstData {
ConstData { name, type_ref } ConstData { name, type_ref }
} }
} }
fn collect_impl_items_in_macros(
db: &impl DefDatabase,
module_id: ModuleId,
impl_block: &InFile<ast::ItemList>,
id: ImplId,
) -> Vec<AssocItemId> {
let mut expander = Expander::new(db, impl_block.file_id, module_id);
let mut res = Vec::new();
// We set a limit to protect against infinite recursion
let limit = 100;
for m in impl_block.value.syntax().children().filter_map(ast::MacroCall::cast) {
res.extend(collect_impl_items_in_macro(db, &mut expander, m, id, limit))
}
res
}
fn collect_impl_items_in_macro(
db: &impl DefDatabase,
expander: &mut Expander,
m: ast::MacroCall,
id: ImplId,
limit: usize,
) -> Vec<AssocItemId> {
if limit == 0 {
return Vec::new();
}
if let Some((mark, items)) = expander.enter_expand(db, m) {
let items: InFile<ast::MacroItems> = expander.to_source(items);
let mut res = collect_impl_items(
db,
items.value.items().filter_map(|it| ImplItem::cast(it.syntax().clone())),
items.file_id,
id,
);
// Recursive collect macros
// Note that ast::ModuleItem do not include ast::MacroCall
// We cannot use ModuleItemOwner::items here
for it in items.value.syntax().children().filter_map(ast::MacroCall::cast) {
res.extend(collect_impl_items_in_macro(db, expander, it, id, limit - 1))
}
expander.exit(db, mark);
res
} else {
Vec::new()
}
}
fn collect_impl_items(
db: &impl DefDatabase,
impl_items: impl Iterator<Item = ImplItem>,
file_id: crate::HirFileId,
id: ImplId,
) -> Vec<AssocItemId> {
let items = db.ast_id_map(file_id);
impl_items
.map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => {
let def = FunctionLoc {
container: AssocContainerId::ImplId(id),
ast_id: AstId::new(file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
ast::ImplItem::ConstDef(it) => {
let def = ConstLoc {
container: AssocContainerId::ImplId(id),
ast_id: AstId::new(file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
ast::ImplItem::TypeAliasDef(it) => {
let def = TypeAliasLoc {
container: AssocContainerId::ImplId(id),
ast_id: AstId::new(file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
})
.collect()
}

View file

@ -3,7 +3,7 @@ use std::sync::Arc;
use hir_expand::{db::AstDatabase, HirFileId}; use hir_expand::{db::AstDatabase, HirFileId};
use ra_db::{salsa, CrateId, SourceDatabase}; use ra_db::{salsa, CrateId, SourceDatabase};
use ra_syntax::{ast, SmolStr}; use ra_syntax::SmolStr;
use crate::{ use crate::{
adt::{EnumData, StructData}, adt::{EnumData, StructData},
@ -13,13 +13,10 @@ use crate::{
docs::Documentation, docs::Documentation,
generics::GenericParams, generics::GenericParams,
lang_item::{LangItemTarget, LangItems}, lang_item::{LangItemTarget, LangItems},
nameres::{ nameres::{raw::RawItems, CrateDefMap},
raw::{ImportSourceMap, RawItems}, AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, FunctionId, FunctionLoc,
CrateDefMap, GenericDefId, ImplId, ImplLoc, ModuleId, StaticId, StaticLoc, StructId, StructLoc, TraitId,
}, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc,
AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, FunctionId, FunctionLoc, GenericDefId,
ImplId, ItemLoc, ModuleId, StaticId, StaticLoc, StructId, TraitId, TypeAliasId, TypeAliasLoc,
UnionId,
}; };
#[salsa::query_group(InternDatabaseStorage)] #[salsa::query_group(InternDatabaseStorage)]
@ -27,31 +24,25 @@ pub trait InternDatabase: SourceDatabase {
#[salsa::interned] #[salsa::interned]
fn intern_function(&self, loc: FunctionLoc) -> FunctionId; fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
#[salsa::interned] #[salsa::interned]
fn intern_struct(&self, loc: ItemLoc<ast::StructDef>) -> StructId; fn intern_struct(&self, loc: StructLoc) -> StructId;
#[salsa::interned] #[salsa::interned]
fn intern_union(&self, loc: ItemLoc<ast::UnionDef>) -> UnionId; fn intern_union(&self, loc: UnionLoc) -> UnionId;
#[salsa::interned] #[salsa::interned]
fn intern_enum(&self, loc: ItemLoc<ast::EnumDef>) -> EnumId; fn intern_enum(&self, loc: EnumLoc) -> EnumId;
#[salsa::interned] #[salsa::interned]
fn intern_const(&self, loc: ConstLoc) -> ConstId; fn intern_const(&self, loc: ConstLoc) -> ConstId;
#[salsa::interned] #[salsa::interned]
fn intern_static(&self, loc: StaticLoc) -> StaticId; fn intern_static(&self, loc: StaticLoc) -> StaticId;
#[salsa::interned] #[salsa::interned]
fn intern_trait(&self, loc: ItemLoc<ast::TraitDef>) -> TraitId; fn intern_trait(&self, loc: TraitLoc) -> TraitId;
#[salsa::interned] #[salsa::interned]
fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId; fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
#[salsa::interned] #[salsa::interned]
fn intern_impl(&self, loc: ItemLoc<ast::ImplBlock>) -> ImplId; fn intern_impl(&self, loc: ImplLoc) -> ImplId;
} }
#[salsa::query_group(DefDatabaseStorage)] #[salsa::query_group(DefDatabaseStorage)]
pub trait DefDatabase: InternDatabase + AstDatabase { pub trait DefDatabase: InternDatabase + AstDatabase {
#[salsa::invoke(RawItems::raw_items_with_source_map_query)]
fn raw_items_with_source_map(
&self,
file_id: HirFileId,
) -> (Arc<RawItems>, Arc<ImportSourceMap>);
#[salsa::invoke(RawItems::raw_items_query)] #[salsa::invoke(RawItems::raw_items_query)]
fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>; fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>;

View file

@ -6,7 +6,7 @@ use hir_expand::diagnostics::Diagnostic;
use ra_db::RelativePathBuf; use ra_db::RelativePathBuf;
use ra_syntax::{ast, AstPtr, SyntaxNodePtr}; use ra_syntax::{ast, AstPtr, SyntaxNodePtr};
use hir_expand::{HirFileId, Source}; use hir_expand::{HirFileId, InFile};
#[derive(Debug)] #[derive(Debug)]
pub struct UnresolvedModule { pub struct UnresolvedModule {
@ -19,8 +19,8 @@ impl Diagnostic for UnresolvedModule {
fn message(&self) -> String { fn message(&self) -> String {
"unresolved module".to_string() "unresolved module".to_string()
} }
fn source(&self) -> Source<SyntaxNodePtr> { fn source(&self) -> InFile<SyntaxNodePtr> {
Source { file_id: self.file, value: self.decl.into() } InFile { file_id: self.file, value: self.decl.into() }
} }
fn as_any(&self) -> &(dyn Any + Send + 'static) { fn as_any(&self) -> &(dyn Any + Send + 'static) {
self self

View file

@ -5,10 +5,14 @@
use std::sync::Arc; use std::sync::Arc;
use hir_expand::either::Either; use either::Either;
use ra_syntax::ast; use ra_syntax::ast;
use crate::{db::DefDatabase, AdtId, AstItemDef, AttrDefId, HasChildSource, HasSource, Lookup}; use crate::{
db::DefDatabase,
src::{HasChildSource, HasSource},
AdtId, AttrDefId, Lookup,
};
/// Holds documentation /// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@ -42,21 +46,21 @@ impl Documentation {
AttrDefId::StructFieldId(it) => { AttrDefId::StructFieldId(it) => {
let src = it.parent.child_source(db); let src = it.parent.child_source(db);
match &src.value[it.local_id] { match &src.value[it.local_id] {
Either::A(_tuple) => None, Either::Left(_tuple) => None,
Either::B(record) => docs_from_ast(record), Either::Right(record) => docs_from_ast(record),
} }
} }
AttrDefId::AdtId(it) => match it { AttrDefId::AdtId(it) => match it {
AdtId::StructId(it) => docs_from_ast(&it.source(db).value), AdtId::StructId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AdtId::EnumId(it) => docs_from_ast(&it.source(db).value), AdtId::EnumId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AdtId::UnionId(it) => docs_from_ast(&it.source(db).value), AdtId::UnionId(it) => docs_from_ast(&it.lookup(db).source(db).value),
}, },
AttrDefId::EnumVariantId(it) => { AttrDefId::EnumVariantId(it) => {
let src = it.parent.child_source(db); let src = it.parent.child_source(db);
docs_from_ast(&src.value[it.local_id]) docs_from_ast(&src.value[it.local_id])
} }
AttrDefId::TraitId(it) => docs_from_ast(&it.source(db).value), AttrDefId::TraitId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::MacroDefId(it) => docs_from_ast(&it.ast_id.to_node(db)), AttrDefId::MacroDefId(it) => docs_from_ast(&it.ast_id?.to_node(db)),
AttrDefId::ConstId(it) => docs_from_ast(&it.lookup(db).source(db).value), AttrDefId::ConstId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::StaticId(it) => docs_from_ast(&it.lookup(db).source(db).value), AttrDefId::StaticId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::FunctionId(it) => docs_from_ast(&it.lookup(db).source(db).value), AttrDefId::FunctionId(it) => docs_from_ast(&it.lookup(db).source(db).value),

View file

@ -0,0 +1,108 @@
//! This module defines a `DynMap` -- a container for heterogeneous maps.
//!
//! This means that `DynMap` stores a bunch of hash maps inside, and those maps
//! can be of different types.
//!
//! It is used like this:
//!
//! ```
//! // keys define submaps of a `DynMap`
//! const STRING_TO_U32: Key<String, u32> = Key::new();
//! const U32_TO_VEC: Key<u32, Vec<bool>> = Key::new();
//!
//! // Note: concrete type, no type params!
//! let mut map = DynMap::default();
//!
//! // To access a specific map, index the `DynMap` by `Key`:
//! map[STRING_TO_U32].insert("hello".to_string(), 92);
//! let value = map[U32_TO_VEC].get(&92);
//! assert!(value.is_none());
//! ```
//!
//! This is a work of fiction. Any similarities to Kotlin's `BindingContext` are
//! a coincidence.
use std::{
hash::Hash,
marker::PhantomData,
ops::{Index, IndexMut},
};
use anymap::Map;
use rustc_hash::FxHashMap;
/// A type-level key identifying one homogeneous submap of a `DynMap`.
///
/// `K` and `V` are the submap's key and value types; `P` is the `Policy`
/// that defines how entries are stored and retrieved (defaults to the
/// plain `(K, V)` hash-map policy below).
pub struct Key<K, V, P = (K, V)> {
    _phantom: PhantomData<(K, V, P)>,
}
impl<K, V, P> Key<K, V, P> {
    /// `const fn` so that keys can be declared as `const` items.
    pub(crate) const fn new() -> Key<K, V, P> {
        Key { _phantom: PhantomData }
    }
}
// `Copy`/`Clone` are implemented manually: a `#[derive]` would add
// `K: Copy`/`K: Clone` (etc.) bounds on the type parameters, which are
// unnecessary since the struct only stores `PhantomData`.
impl<K, V, P> Copy for Key<K, V, P> {}
impl<K, V, P> Clone for Key<K, V, P> {
    fn clone(&self) -> Key<K, V, P> {
        *self
    }
}
/// Defines how entries for a particular `Key` are stored inside a `DynMap`.
///
/// An implementation chooses the concrete backing map and the lookup
/// strategy; see the `(K, V)` impl below for the default behavior.
pub trait Policy {
    type K;
    type V;

    /// Store `key -> value` in `map`'s submap for this policy.
    fn insert(map: &mut DynMap, key: Self::K, value: Self::V);
    /// Look `key` up in `map`'s submap for this policy.
    fn get<'a>(map: &'a DynMap, key: &Self::K) -> Option<&'a Self::V>;
}
/// Default policy: one `FxHashMap<K, V>` per `(K, V)` pair, stored in the
/// type-keyed `anymap`.
impl<K: Hash + Eq + 'static, V: 'static> Policy for (K, V) {
    type K = K;
    type V = V;
    fn insert(map: &mut DynMap, key: K, value: V) {
        // Lazily create the submap for this (K, V) pair on first insert.
        let submap = map.map.entry::<FxHashMap<K, V>>().or_insert_with(Default::default);
        submap.insert(key, value);
    }
    fn get<'a>(map: &'a DynMap, key: &K) -> Option<&'a V> {
        // Missing submap simply means the key was never inserted.
        map.map.get::<FxHashMap<K, V>>().and_then(|submap| submap.get(key))
    }
}
/// A heterogeneous map: internally stores one backing map per entry type
/// (see `Policy`), all held in a single type-keyed `anymap::Map`.
pub struct DynMap {
    pub(crate) map: Map,
}

impl Default for DynMap {
    /// Creates an empty `DynMap` (no submaps allocated yet).
    fn default() -> Self {
        DynMap { map: Map::new() }
    }
}
/// A view of a `DynMap` restricted to one `Key`'s submap.
///
/// `#[repr(transparent)]` guarantees `KeyMap<KEY>` has exactly the same
/// layout as `DynMap`; the `Index`/`IndexMut` impls below rely on this to
/// soundly `transmute` between references to the two types.
#[repr(transparent)]
pub struct KeyMap<KEY> {
    map: DynMap,
    _phantom: PhantomData<KEY>,
}
impl<P: Policy> KeyMap<Key<P::K, P::V, P>> {
    /// Inserts `key -> value` into this key's submap.
    pub fn insert(&mut self, key: P::K, value: P::V) {
        P::insert(&mut self.map, key, value)
    }
    /// Looks `key` up in this key's submap.
    pub fn get(&self, key: &P::K) -> Option<&P::V> {
        P::get(&self.map, key)
    }
}
// Indexing a `DynMap` by a `Key` narrows it to that key's typed submap.
impl<P: Policy> Index<Key<P::K, P::V, P>> for DynMap {
    type Output = KeyMap<Key<P::K, P::V, P>>;
    fn index(&self, _key: Key<P::K, P::V, P>) -> &Self::Output {
        // Safe due to `#[repr(transparent)]`: `KeyMap` is layout-identical
        // to `DynMap`, so reinterpreting the reference is sound. The key
        // value itself is unused; only its type selects the submap.
        unsafe { std::mem::transmute::<&DynMap, &KeyMap<Key<P::K, P::V, P>>>(self) }
    }
}
impl<P: Policy> IndexMut<Key<P::K, P::V, P>> for DynMap {
    fn index_mut(&mut self, _key: Key<P::K, P::V, P>) -> &mut Self::Output {
        // Safe due to `#[repr(transparent)]`: same layout, so the mutable
        // reinterpretation is equally sound.
        unsafe { std::mem::transmute::<&mut DynMap, &mut KeyMap<Key<P::K, P::V, P>>>(self) }
    }
}

View file

@ -14,6 +14,7 @@
use hir_expand::name::Name; use hir_expand::name::Name;
use ra_arena::{impl_arena_id, RawId}; use ra_arena::{impl_arena_id, RawId};
use ra_syntax::ast::RangeOp;
use crate::{ use crate::{
builtin_type::{BuiltinFloat, BuiltinInt}, builtin_type::{BuiltinFloat, BuiltinInt},
@ -130,6 +131,11 @@ pub enum Expr {
rhs: ExprId, rhs: ExprId,
op: Option<BinaryOp>, op: Option<BinaryOp>,
}, },
Range {
lhs: Option<ExprId>,
rhs: Option<ExprId>,
range_type: RangeOp,
},
Index { Index {
base: ExprId, base: ExprId,
index: ExprId, index: ExprId,
@ -137,6 +143,7 @@ pub enum Expr {
Lambda { Lambda {
args: Vec<PatId>, args: Vec<PatId>,
arg_types: Vec<Option<TypeRef>>, arg_types: Vec<Option<TypeRef>>,
ret_type: Option<TypeRef>,
body: ExprId, body: ExprId,
}, },
Tuple { Tuple {
@ -288,6 +295,14 @@ impl Expr {
f(*lhs); f(*lhs);
f(*rhs); f(*rhs);
} }
Expr::Range { lhs, rhs, .. } => {
if let Some(lhs) = rhs {
f(*lhs);
}
if let Some(rhs) = lhs {
f(*rhs);
}
}
Expr::Index { base, index } => { Expr::Index { base, index } => {
f(*base); f(*base);
f(*index); f(*index);

View file

@ -4,20 +4,29 @@
//! in rustc. //! in rustc.
use std::sync::Arc; use std::sync::Arc;
use hir_expand::name::{self, AsName, Name}; use either::Either;
use hir_expand::{
name::{name, AsName, Name},
InFile,
};
use ra_arena::{map::ArenaMap, Arena};
use ra_db::FileId;
use ra_syntax::ast::{self, NameOwner, TypeBoundsOwner, TypeParamsOwner}; use ra_syntax::ast::{self, NameOwner, TypeBoundsOwner, TypeParamsOwner};
use crate::{ use crate::{
child_by_source::ChildBySource,
db::DefDatabase, db::DefDatabase,
dyn_map::DynMap,
keys,
src::HasChildSource,
src::HasSource,
type_ref::{TypeBound, TypeRef}, type_ref::{TypeBound, TypeRef},
AdtId, AstItemDef, ContainerId, GenericDefId, HasSource, Lookup, AdtId, GenericDefId, LocalTypeParamId, Lookup, TypeParamId,
}; };
/// Data about a generic parameter (to a function, struct, impl, ...). /// Data about a generic parameter (to a function, struct, impl, ...).
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug)]
pub struct GenericParam { pub struct TypeParamData {
// FIXME: give generic params proper IDs
pub idx: u32,
pub name: Name, pub name: Name,
pub default: Option<TypeRef>, pub default: Option<TypeRef>,
} }
@ -25,8 +34,8 @@ pub struct GenericParam {
/// Data about the generic parameters of a function, struct, impl, etc. /// Data about the generic parameters of a function, struct, impl, etc.
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug)]
pub struct GenericParams { pub struct GenericParams {
pub parent_params: Option<Arc<GenericParams>>, pub types: Arena<LocalTypeParamId, TypeParamData>,
pub params: Vec<GenericParam>, // lifetimes: Arena<LocalLifetimeParamId, LifetimeParamData>,
pub where_predicates: Vec<WherePredicate>, pub where_predicates: Vec<WherePredicate>,
} }
@ -40,63 +49,87 @@ pub struct WherePredicate {
pub bound: TypeBound, pub bound: TypeBound,
} }
type SourceMap = ArenaMap<LocalTypeParamId, Either<ast::TraitDef, ast::TypeParam>>;
impl GenericParams { impl GenericParams {
pub(crate) fn generic_params_query( pub(crate) fn generic_params_query(
db: &impl DefDatabase, db: &impl DefDatabase,
def: GenericDefId, def: GenericDefId,
) -> Arc<GenericParams> { ) -> Arc<GenericParams> {
let parent_generics = parent_generic_def(db, def).map(|it| db.generic_params(it)); let (params, _source_map) = GenericParams::new(db, def.into());
Arc::new(GenericParams::new(db, def.into(), parent_generics)) Arc::new(params)
} }
fn new( fn new(db: &impl DefDatabase, def: GenericDefId) -> (GenericParams, InFile<SourceMap>) {
db: &impl DefDatabase, let mut generics = GenericParams { types: Arena::default(), where_predicates: Vec::new() };
def: GenericDefId, let mut sm = ArenaMap::default();
parent_params: Option<Arc<GenericParams>>,
) -> GenericParams {
let mut generics =
GenericParams { params: Vec::new(), parent_params, where_predicates: Vec::new() };
let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32;
// FIXME: add `: Sized` bound for everything except for `Self` in traits // FIXME: add `: Sized` bound for everything except for `Self` in traits
match def { let file_id = match def {
GenericDefId::FunctionId(it) => generics.fill(&it.lookup(db).source(db).value, start), GenericDefId::FunctionId(it) => {
GenericDefId::AdtId(AdtId::StructId(it)) => generics.fill(&it.source(db).value, start), let src = it.lookup(db).source(db);
GenericDefId::AdtId(AdtId::UnionId(it)) => generics.fill(&it.source(db).value, start), generics.fill(&mut sm, &src.value);
GenericDefId::AdtId(AdtId::EnumId(it)) => generics.fill(&it.source(db).value, start), src.file_id
}
GenericDefId::AdtId(AdtId::StructId(it)) => {
let src = it.lookup(db).source(db);
generics.fill(&mut sm, &src.value);
src.file_id
}
GenericDefId::AdtId(AdtId::UnionId(it)) => {
let src = it.lookup(db).source(db);
generics.fill(&mut sm, &src.value);
src.file_id
}
GenericDefId::AdtId(AdtId::EnumId(it)) => {
let src = it.lookup(db).source(db);
generics.fill(&mut sm, &src.value);
src.file_id
}
GenericDefId::TraitId(it) => { GenericDefId::TraitId(it) => {
let src = it.lookup(db).source(db);
// traits get the Self type as an implicit first type parameter // traits get the Self type as an implicit first type parameter
generics.params.push(GenericParam { let self_param_id =
idx: start, generics.types.alloc(TypeParamData { name: name![Self], default: None });
name: name::SELF_TYPE, sm.insert(self_param_id, Either::Left(src.value.clone()));
default: None,
});
generics.fill(&it.source(db).value, start + 1);
// add super traits as bounds on Self // add super traits as bounds on Self
// i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
let self_param = TypeRef::Path(name::SELF_TYPE.into()); let self_param = TypeRef::Path(name![Self].into());
generics.fill_bounds(&it.source(db).value, self_param); generics.fill_bounds(&src.value, self_param);
generics.fill(&mut sm, &src.value);
src.file_id
}
GenericDefId::TypeAliasId(it) => {
let src = it.lookup(db).source(db);
generics.fill(&mut sm, &src.value);
src.file_id
} }
GenericDefId::TypeAliasId(it) => generics.fill(&it.lookup(db).source(db).value, start),
// Note that we don't add `Self` here: in `impl`s, `Self` is not a // Note that we don't add `Self` here: in `impl`s, `Self` is not a
// type-parameter, but rather is a type-alias for impl's target // type-parameter, but rather is a type-alias for impl's target
// type, so this is handled by the resolver. // type, so this is handled by the resolver.
GenericDefId::ImplId(it) => generics.fill(&it.source(db).value, start), GenericDefId::ImplId(it) => {
GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => {} let src = it.lookup(db).source(db);
} generics.fill(&mut sm, &src.value);
src.file_id
}
// We won't be using this ID anyway
GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => FileId(!0).into(),
};
generics (generics, InFile::new(file_id, sm))
} }
fn fill(&mut self, node: &impl TypeParamsOwner, start: u32) { fn fill(&mut self, sm: &mut SourceMap, node: &dyn TypeParamsOwner) {
if let Some(params) = node.type_param_list() { if let Some(params) = node.type_param_list() {
self.fill_params(params, start) self.fill_params(sm, params)
} }
if let Some(where_clause) = node.where_clause() { if let Some(where_clause) = node.where_clause() {
self.fill_where_predicates(where_clause); self.fill_where_predicates(where_clause);
} }
} }
fn fill_bounds(&mut self, node: &impl ast::TypeBoundsOwner, type_ref: TypeRef) { fn fill_bounds(&mut self, node: &dyn ast::TypeBoundsOwner, type_ref: TypeRef) {
for bound in for bound in
node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds()) node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
{ {
@ -104,13 +137,14 @@ impl GenericParams {
} }
} }
fn fill_params(&mut self, params: ast::TypeParamList, start: u32) { fn fill_params(&mut self, sm: &mut SourceMap, params: ast::TypeParamList) {
for (idx, type_param) in params.type_params().enumerate() { for type_param in params.type_params() {
let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
// FIXME: Use `Path::from_src` // FIXME: Use `Path::from_src`
let default = type_param.default_type().map(TypeRef::from_ast); let default = type_param.default_type().map(TypeRef::from_ast);
let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; let param = TypeParamData { name: name.clone(), default };
self.params.push(param); let param_id = self.types.alloc(param);
sm.insert(param_id, Either::Right(type_param.clone()));
let type_ref = TypeRef::Path(name.into()); let type_ref = TypeRef::Path(name.into());
self.fill_bounds(&type_param, type_ref); self.fill_bounds(&type_param, type_ref);
@ -139,45 +173,31 @@ impl GenericParams {
self.where_predicates.push(WherePredicate { type_ref, bound }); self.where_predicates.push(WherePredicate { type_ref, bound });
} }
pub fn find_by_name(&self, name: &Name) -> Option<&GenericParam> { pub fn find_by_name(&self, name: &Name) -> Option<LocalTypeParamId> {
self.params.iter().find(|p| &p.name == name) self.types.iter().find_map(|(id, p)| if &p.name == name { Some(id) } else { None })
} }
}
pub fn count_parent_params(&self) -> usize { impl HasChildSource for GenericDefId {
self.parent_params.as_ref().map(|p| p.count_params_including_parent()).unwrap_or(0) type ChildId = LocalTypeParamId;
type Value = Either<ast::TraitDef, ast::TypeParam>;
fn child_source(&self, db: &impl DefDatabase) -> InFile<SourceMap> {
let (_, sm) = GenericParams::new(db, *self);
sm
} }
}
pub fn count_params_including_parent(&self) -> usize { impl ChildBySource for GenericDefId {
let parent_count = self.count_parent_params(); fn child_by_source(&self, db: &impl DefDatabase) -> DynMap {
parent_count + self.params.len() let mut res = DynMap::default();
} let arena_map = self.child_source(db);
let arena_map = arena_map.as_ref();
fn for_each_param<'a>(&'a self, f: &mut impl FnMut(&'a GenericParam)) { for (local_id, src) in arena_map.value.iter() {
if let Some(parent) = &self.parent_params { let id = TypeParamId { parent: *self, local_id };
parent.for_each_param(f); if let Either::Right(type_param) = src {
res[keys::TYPE_PARAM].insert(arena_map.with_value(type_param.clone()), id)
}
} }
self.params.iter().for_each(f); res
}
pub fn params_including_parent(&self) -> Vec<&GenericParam> {
let mut vec = Vec::with_capacity(self.count_params_including_parent());
self.for_each_param(&mut |p| vec.push(p));
vec
}
}
fn parent_generic_def(db: &impl DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
let container = match def {
GenericDefId::FunctionId(it) => it.lookup(db).container,
GenericDefId::TypeAliasId(it) => it.lookup(db).container,
GenericDefId::ConstId(it) => it.lookup(db).container,
GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
};
match container {
ContainerId::ImplId(it) => Some(it.into()),
ContainerId::TraitId(it) => Some(it.into()),
ContainerId::ModuleId(_) => None,
} }
} }

View file

@ -0,0 +1,172 @@
//! Describes items defined or visible (ie, imported) in a certain scope.
//! This is shared between modules and blocks.
use hir_expand::name::Name;
use once_cell::sync::Lazy;
use rustc_hash::FxHashMap;
use crate::{per_ns::PerNs, AdtId, BuiltinType, ImplId, MacroDefId, ModuleDefId, TraitId};
/// Items defined or visible (i.e. imported) in a certain scope; shared
/// between modules and blocks.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ItemScope {
    // Name -> per-namespace resolution for everything nameable here.
    visible: FxHashMap<Name, PerNs>,
    // Items declared directly in this scope, in declaration order.
    defs: Vec<ModuleDefId>,
    // `impl` blocks declared directly in this scope.
    impls: Vec<ImplId>,
    /// Macros visible in current module in legacy textual scope
    ///
    /// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched in first.
    /// If it yields no result, then it turns to module scoped `macros`.
    /// If macros are invoked with a name qualified with a path like `crate::foo::bar!()`, `legacy_macros` will be skipped,
    /// and only normal scoped `macros` will be searched in.
    ///
    /// Note that this automatically inherits macros defined textually before the definition of module itself.
    ///
    /// Module scoped macros will be inserted into `items` instead of here.
    // FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will
    // be all resolved to the last one defined if shadowing happens.
    legacy_macros: FxHashMap<Name, MacroDefId>,
}
static BUILTIN_SCOPE: Lazy<FxHashMap<Name, PerNs>> = Lazy::new(|| {
BuiltinType::ALL
.iter()
.map(|(name, ty)| (name.clone(), PerNs::types(ty.clone().into())))
.collect()
});
/// Shadow mode for builtin type which can be shadowed by module.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub(crate) enum BuiltinShadowMode {
    // Prefer user-defined items (including modules) over builtin types.
    Module,
    // Prefer Other Types: if the name resolves to a module, the builtin
    // type of the same name wins instead (see `ItemScope::get`).
    Other,
}
/// Legacy macros can only be accessed through special methods like `get_legacy_macro`.
/// Other methods will only resolve values, types and module scoped macros.
impl ItemScope {
    /// All names visible in this scope, including shadowable builtin types.
    pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a {
        //FIXME: shadowing
        self.visible.iter().chain(BUILTIN_SCOPE.iter()).map(|(n, def)| (n, *def))
    }

    /// Like `entries`, but without the implicit builtin-type fallback scope.
    pub fn entries_without_primitives<'a>(
        &'a self,
    ) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a {
        self.visible.iter().map(|(n, def)| (n, *def))
    }

    /// Items declared directly in this scope, in declaration order.
    pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
        self.defs.iter().copied()
    }

    /// `impl` blocks declared directly in this scope.
    pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
        self.impls.iter().copied()
    }

    /// Iterate over all module scoped macros
    pub(crate) fn macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a {
        self.visible.iter().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_)))
    }

    /// Iterate over all legacy textual scoped macros visible at the end of the module
    pub(crate) fn legacy_macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a {
        self.legacy_macros.iter().map(|(name, def)| (name, *def))
    }

    /// Get a name from current module scope, legacy macros are not included
    pub(crate) fn get(&self, name: &Name, shadow: BuiltinShadowMode) -> PerNs {
        match shadow {
            // User-defined items win over builtins unconditionally.
            BuiltinShadowMode::Module => self
                .visible
                .get(name)
                .or_else(|| BUILTIN_SCOPE.get(name))
                .copied()
                .unwrap_or_else(PerNs::none),
            // A builtin type shadows a *module* of the same name; any other
            // user-defined item still wins over the builtin.
            BuiltinShadowMode::Other => {
                let item = self.visible.get(name).copied();
                if let Some(def) = item {
                    if let Some(ModuleDefId::ModuleId(_)) = def.take_types() {
                        return BUILTIN_SCOPE
                            .get(name)
                            .copied()
                            .or(item)
                            .unwrap_or_else(PerNs::none);
                    }
                }

                item.or_else(|| BUILTIN_SCOPE.get(name).copied()).unwrap_or_else(PerNs::none)
            }
        }
    }

    /// Traits visible in this scope (declared or imported).
    pub(crate) fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a {
        self.visible.values().filter_map(|def| match def.take_types() {
            Some(ModuleDefId::TraitId(t)) => Some(t),
            _ => None,
        })
    }

    /// Records `def` as declared in this scope (visibility is handled
    /// separately, via `push_res`).
    pub(crate) fn define_def(&mut self, def: ModuleDefId) {
        self.defs.push(def)
    }

    pub(crate) fn get_legacy_macro(&self, name: &Name) -> Option<MacroDefId> {
        self.legacy_macros.get(name).copied()
    }

    pub(crate) fn define_impl(&mut self, imp: ImplId) {
        self.impls.push(imp)
    }

    pub(crate) fn define_legacy_macro(&mut self, name: Name, mac: MacroDefId) {
        self.legacy_macros.insert(name, mac);
    }

    /// Makes `def` visible under `name`, filling only the namespaces that
    /// are still empty. Returns `true` iff anything changed.
    pub(crate) fn push_res(&mut self, name: Name, def: PerNs) -> bool {
        let mut changed = false;
        // `name` is owned and not used afterwards, so no clone is needed
        // for the entry lookup.
        let existing = self.visible.entry(name).or_default();

        if existing.types.is_none() && def.types.is_some() {
            existing.types = def.types;
            changed = true;
        }
        if existing.values.is_none() && def.values.is_some() {
            existing.values = def.values;
            changed = true;
        }
        if existing.macros.is_none() && def.macros.is_some() {
            existing.macros = def.macros;
            changed = true;
        }

        changed
    }

    /// Snapshot of all visible resolutions.
    pub(crate) fn collect_resolutions(&self) -> Vec<(Name, PerNs)> {
        // `PerNs` is `Copy` (see the `.copied()` calls above), so dereference
        // instead of cloning.
        self.visible.iter().map(|(name, res)| (name.clone(), *res)).collect()
    }

    /// Snapshot of the legacy textual macro scope.
    pub(crate) fn collect_legacy_macros(&self) -> FxHashMap<Name, MacroDefId> {
        self.legacy_macros.clone()
    }
}
impl From<ModuleDefId> for PerNs {
    /// Places a definition into the namespace(s) it occupies: types,
    /// values, or both.
    fn from(def: ModuleDefId) -> PerNs {
        match def {
            // Structs, unions and enum variants name both a type and a
            // value (their constructor).
            ModuleDefId::AdtId(AdtId::StructId(_))
            | ModuleDefId::AdtId(AdtId::UnionId(_))
            | ModuleDefId::EnumVariantId(_) => PerNs::both(def, def),
            // Purely value-like definitions.
            ModuleDefId::FunctionId(_) | ModuleDefId::ConstId(_) | ModuleDefId::StaticId(_) => {
                PerNs::values(def)
            }
            // Purely type-like definitions.
            ModuleDefId::ModuleId(_)
            | ModuleDefId::AdtId(AdtId::EnumId(_))
            | ModuleDefId::TraitId(_)
            | ModuleDefId::TypeAliasId(_)
            | ModuleDefId::BuiltinType(_) => PerNs::types(def),
        }
    }
}

View file

@ -0,0 +1,56 @@
//! keys to be used with `DynMap`
use std::marker::PhantomData;
use hir_expand::InFile;
use ra_syntax::{ast, AstNode, AstPtr};
use rustc_hash::FxHashMap;
use crate::{
dyn_map::{DynMap, Policy},
ConstId, EnumId, EnumVariantId, FunctionId, ImplId, StaticId, StructFieldId, StructId, TraitId,
TypeAliasId, TypeParamId, UnionId,
};
/// A `DynMap` key mapping a definition's syntax node (`InFile<K>`) to its
/// interned ID (`V`), compared via `AstPtr` rather than node identity (see
/// `AstPtrPolicy`).
pub type Key<K, V> = crate::dyn_map::Key<InFile<K>, V, AstPtrPolicy<K, V>>;
// One key per definition kind: the AST node type on the left, the
// corresponding interned ID on the right.
pub const FUNCTION: Key<ast::FnDef, FunctionId> = Key::new();
pub const CONST: Key<ast::ConstDef, ConstId> = Key::new();
pub const STATIC: Key<ast::StaticDef, StaticId> = Key::new();
pub const TYPE_ALIAS: Key<ast::TypeAliasDef, TypeAliasId> = Key::new();
pub const IMPL: Key<ast::ImplBlock, ImplId> = Key::new();
pub const TRAIT: Key<ast::TraitDef, TraitId> = Key::new();
pub const STRUCT: Key<ast::StructDef, StructId> = Key::new();
pub const UNION: Key<ast::UnionDef, UnionId> = Key::new();
pub const ENUM: Key<ast::EnumDef, EnumId> = Key::new();
pub const ENUM_VARIANT: Key<ast::EnumVariant, EnumVariantId> = Key::new();
pub const TUPLE_FIELD: Key<ast::TupleFieldDef, StructFieldId> = Key::new();
pub const RECORD_FIELD: Key<ast::RecordFieldDef, StructFieldId> = Key::new();
pub const TYPE_PARAM: Key<ast::TypeParam, TypeParamId> = Key::new();
/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
/// equal if they point to exactly the same object.
///
/// In general, we do not guarantee that we have exactly one instance of a
/// syntax tree for each file. We probably should add such guarantee, but, for
/// the time being, we will use identity-less AstPtr comparison.
pub struct AstPtrPolicy<AST, ID> {
    // Zero-sized marker: the struct stores no data, it only carries the
    // `AST`/`ID` type parameters at the type level.
    _phantom: PhantomData<(AST, ID)>,
}
/// Stores/retrieves entries keyed by `InFile<AstPtr<AST>>`: the incoming
/// `InFile<AST>` node is converted to an `AstPtr` first, so lookups do not
/// depend on syntax-tree identity.
impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
    type K = InFile<AST>;
    type V = ID;
    fn insert(map: &mut DynMap, key: InFile<AST>, value: ID) {
        let ptr_key = key.as_ref().map(AstPtr::new);
        let submap = map
            .map
            .entry::<FxHashMap<InFile<AstPtr<AST>>, ID>>()
            .or_insert_with(Default::default);
        submap.insert(ptr_key, value);
    }
    fn get<'a>(map: &'a DynMap, key: &InFile<AST>) -> Option<&'a ID> {
        let ptr_key = key.as_ref().map(AstPtr::new);
        let submap = map.map.get::<FxHashMap<InFile<AstPtr<AST>>, ID>>()?;
        submap.get(&ptr_key)
    }
}

View file

@ -81,7 +81,7 @@ impl LangItems {
// Look for impl targets // Look for impl targets
let def_map = db.crate_def_map(module.krate); let def_map = db.crate_def_map(module.krate);
let module_data = &def_map[module.local_id]; let module_data = &def_map[module.local_id];
for &impl_block in module_data.impls.iter() { for impl_block in module_data.scope.impls() {
self.collect_lang_item(db, impl_block, LangItemTarget::ImplBlockId) self.collect_lang_item(db, impl_block, LangItemTarget::ImplBlockId)
} }

View file

@ -15,6 +15,10 @@ pub mod type_ref;
pub mod builtin_type; pub mod builtin_type;
pub mod diagnostics; pub mod diagnostics;
pub mod per_ns; pub mod per_ns;
pub mod item_scope;
pub mod dyn_map;
pub mod keys;
pub mod adt; pub mod adt;
pub mod data; pub mod data;
@ -29,23 +33,23 @@ pub mod resolver;
mod trace; mod trace;
pub mod nameres; pub mod nameres;
pub mod src;
pub mod child_by_source;
#[cfg(test)] #[cfg(test)]
mod test_db; mod test_db;
#[cfg(test)] #[cfg(test)]
mod marks; mod marks;
use std::hash::{Hash, Hasher}; use std::hash::Hash;
use hir_expand::{ast_id_map::FileAstId, db::AstDatabase, AstId, HirFileId, MacroDefId, Source}; use hir_expand::{ast_id_map::FileAstId, AstId, HirFileId, InFile, MacroDefId};
use ra_arena::{impl_arena_id, map::ArenaMap, RawId}; use ra_arena::{impl_arena_id, RawId};
use ra_db::{impl_intern_key, salsa, CrateId}; use ra_db::{impl_intern_key, salsa, CrateId};
use ra_syntax::{ast, AstNode}; use ra_syntax::{ast, AstNode};
use crate::{builtin_type::BuiltinType, db::InternDatabase}; use crate::body::Expander;
use crate::builtin_type::BuiltinType;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct LocalImportId(RawId);
impl_arena_id!(LocalImportId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ModuleId { pub struct ModuleId {
@ -59,122 +63,57 @@ pub struct ModuleId {
pub struct LocalModuleId(RawId); pub struct LocalModuleId(RawId);
impl_arena_id!(LocalModuleId); impl_arena_id!(LocalModuleId);
#[derive(Debug)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ItemLoc<N: AstNode> { pub struct ItemLoc<N: AstNode> {
pub(crate) module: ModuleId, pub container: ContainerId,
ast_id: AstId<N>, pub ast_id: AstId<N>,
} }
impl<N: AstNode> PartialEq for ItemLoc<N> { #[derive(Debug, Clone, PartialEq, Eq, Hash)]
fn eq(&self, other: &Self) -> bool { pub struct AssocItemLoc<N: AstNode> {
self.module == other.module && self.ast_id == other.ast_id pub container: AssocContainerId,
} pub ast_id: AstId<N>,
}
impl<N: AstNode> Eq for ItemLoc<N> {}
impl<N: AstNode> Hash for ItemLoc<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.module.hash(hasher);
self.ast_id.hash(hasher);
}
} }
impl<N: AstNode> Clone for ItemLoc<N> { macro_rules! impl_intern {
fn clone(&self) -> ItemLoc<N> { ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
ItemLoc { module: self.module, ast_id: self.ast_id } impl_intern_key!($id);
}
}
#[derive(Clone, Copy)] impl Intern for $loc {
pub struct LocationCtx<DB> { type ID = $id;
db: DB, fn intern(self, db: &impl db::DefDatabase) -> $id {
module: ModuleId, db.$intern(self)
file_id: HirFileId, }
} }
impl<'a, DB> LocationCtx<&'a DB> { impl Lookup for $id {
pub fn new(db: &'a DB, module: ModuleId, file_id: HirFileId) -> LocationCtx<&'a DB> { type Data = $loc;
LocationCtx { db, module, file_id } fn lookup(&self, db: &impl db::DefDatabase) -> $loc {
} db.$lookup(*self)
} }
}
pub trait AstItemDef<N: AstNode>: salsa::InternKey + Clone { };
fn intern(db: &impl InternDatabase, loc: ItemLoc<N>) -> Self;
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<N>;
fn from_ast_id(ctx: LocationCtx<&impl InternDatabase>, ast_id: FileAstId<N>) -> Self {
let loc = ItemLoc { module: ctx.module, ast_id: AstId::new(ctx.file_id, ast_id) };
Self::intern(ctx.db, loc)
}
fn source(self, db: &(impl AstDatabase + InternDatabase)) -> Source<N> {
let loc = self.lookup_intern(db);
let value = loc.ast_id.to_node(db);
Source { file_id: loc.ast_id.file_id(), value }
}
fn module(self, db: &impl InternDatabase) -> ModuleId {
let loc = self.lookup_intern(db);
loc.module
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct FunctionId(salsa::InternId); pub struct FunctionId(salsa::InternId);
impl_intern_key!(FunctionId); type FunctionLoc = AssocItemLoc<ast::FnDef>;
impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FunctionLoc {
pub container: ContainerId,
pub ast_id: AstId<ast::FnDef>,
}
impl Intern for FunctionLoc {
type ID = FunctionId;
fn intern(self, db: &impl db::DefDatabase) -> FunctionId {
db.intern_function(self)
}
}
impl Lookup for FunctionId {
type Data = FunctionLoc;
fn lookup(&self, db: &impl db::DefDatabase) -> FunctionLoc {
db.lookup_intern_function(*self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructId(salsa::InternId); pub struct StructId(salsa::InternId);
impl_intern_key!(StructId); type StructLoc = ItemLoc<ast::StructDef>;
impl AstItemDef<ast::StructDef> for StructId { impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct);
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::StructDef>) -> Self {
db.intern_struct(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::StructDef> {
db.lookup_intern_struct(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct UnionId(salsa::InternId); pub struct UnionId(salsa::InternId);
impl_intern_key!(UnionId); pub type UnionLoc = ItemLoc<ast::UnionDef>;
impl AstItemDef<ast::UnionDef> for UnionId { impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union);
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::UnionDef>) -> Self {
db.intern_union(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::UnionDef> {
db.lookup_intern_union(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumId(salsa::InternId); pub struct EnumId(salsa::InternId);
impl_intern_key!(EnumId); pub type EnumLoc = ItemLoc<ast::EnumDef>;
impl AstItemDef<ast::EnumDef> for EnumId { impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum);
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::EnumDef>) -> Self {
db.intern_enum(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::EnumDef> {
db.lookup_intern_enum(self)
}
}
// FIXME: rename to `VariantId`, only enums can ave variants // FIXME: rename to `VariantId`, only enums can ave variants
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -199,99 +138,39 @@ impl_arena_id!(LocalStructFieldId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ConstId(salsa::InternId); pub struct ConstId(salsa::InternId);
impl_intern_key!(ConstId); type ConstLoc = AssocItemLoc<ast::ConstDef>;
#[derive(Debug, Clone, PartialEq, Eq, Hash)] impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const);
pub struct ConstLoc {
pub container: ContainerId,
pub ast_id: AstId<ast::ConstDef>,
}
impl Intern for ConstLoc {
type ID = ConstId;
fn intern(self, db: &impl db::DefDatabase) -> ConstId {
db.intern_const(self)
}
}
impl Lookup for ConstId {
type Data = ConstLoc;
fn lookup(&self, db: &impl db::DefDatabase) -> ConstLoc {
db.lookup_intern_const(*self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticId(salsa::InternId); pub struct StaticId(salsa::InternId);
impl_intern_key!(StaticId); pub type StaticLoc = ItemLoc<ast::StaticDef>;
impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StaticLoc {
pub container: ModuleId,
pub ast_id: AstId<ast::StaticDef>,
}
impl Intern for StaticLoc {
type ID = StaticId;
fn intern(self, db: &impl db::DefDatabase) -> StaticId {
db.intern_static(self)
}
}
impl Lookup for StaticId {
type Data = StaticLoc;
fn lookup(&self, db: &impl db::DefDatabase) -> StaticLoc {
db.lookup_intern_static(*self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TraitId(salsa::InternId); pub struct TraitId(salsa::InternId);
impl_intern_key!(TraitId); pub type TraitLoc = ItemLoc<ast::TraitDef>;
impl AstItemDef<ast::TraitDef> for TraitId { impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::TraitDef>) -> Self {
db.intern_trait(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::TraitDef> {
db.lookup_intern_trait(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeAliasId(salsa::InternId); pub struct TypeAliasId(salsa::InternId);
impl_intern_key!(TypeAliasId); type TypeAliasLoc = AssocItemLoc<ast::TypeAliasDef>;
impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TypeAliasLoc {
pub container: ContainerId,
pub ast_id: AstId<ast::TypeAliasDef>,
}
impl Intern for TypeAliasLoc {
type ID = TypeAliasId;
fn intern(self, db: &impl db::DefDatabase) -> TypeAliasId {
db.intern_type_alias(self)
}
}
impl Lookup for TypeAliasId {
type Data = TypeAliasLoc;
fn lookup(&self, db: &impl db::DefDatabase) -> TypeAliasLoc {
db.lookup_intern_type_alias(*self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ImplId(salsa::InternId); pub struct ImplId(salsa::InternId);
impl_intern_key!(ImplId); type ImplLoc = ItemLoc<ast::ImplBlock>;
impl AstItemDef<ast::ImplBlock> for ImplId { impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::ImplBlock>) -> Self {
db.intern_impl(loc) #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
} pub struct TypeParamId {
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::ImplBlock> { pub parent: GenericDefId,
db.lookup_intern_impl(self) pub local_id: LocalTypeParamId,
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct LocalTypeParamId(RawId);
impl_arena_id!(LocalTypeParamId);
macro_rules! impl_froms { macro_rules! impl_froms {
($e:ident: $($v:ident $(($($sv:ident),*))?),*) => { ($e:ident: $($v:ident $(($($sv:ident),*))?),*) => {
$( $(
@ -314,9 +193,16 @@ macro_rules! impl_froms {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ContainerId { pub enum ContainerId {
ModuleId(ModuleId), ModuleId(ModuleId),
DefWithBodyId(DefWithBodyId),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum AssocContainerId {
ContainerId(ContainerId),
ImplId(ImplId), ImplId(ImplId),
TraitId(TraitId), TraitId(TraitId),
} }
impl_froms!(AssocContainerId: ContainerId);
/// A Data Type /// A Data Type
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@ -459,43 +345,39 @@ pub trait HasModule {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId; fn module(&self, db: &impl db::DefDatabase) -> ModuleId;
} }
impl HasModule for FunctionLoc { impl HasModule for ContainerId {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
match self.container { match *self {
ContainerId::ModuleId(it) => it, ContainerId::ModuleId(it) => it,
ContainerId::ImplId(it) => it.module(db), ContainerId::DefWithBodyId(it) => it.module(db),
ContainerId::TraitId(it) => it.module(db),
} }
} }
} }
impl HasModule for TypeAliasLoc { impl HasModule for AssocContainerId {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
match self.container { match *self {
ContainerId::ModuleId(it) => it, AssocContainerId::ContainerId(it) => it.module(db),
ContainerId::ImplId(it) => it.module(db), AssocContainerId::ImplId(it) => it.lookup(db).container.module(db),
ContainerId::TraitId(it) => it.module(db), AssocContainerId::TraitId(it) => it.lookup(db).container.module(db),
} }
} }
} }
impl HasModule for ConstLoc { impl<N: AstNode> HasModule for AssocItemLoc<N> {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
match self.container { self.container.module(db)
ContainerId::ModuleId(it) => it,
ContainerId::ImplId(it) => it.module(db),
ContainerId::TraitId(it) => it.module(db),
}
} }
} }
impl HasModule for AdtId { impl HasModule for AdtId {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
match self { match self {
AdtId::StructId(it) => it.module(db), AdtId::StructId(it) => it.lookup(db).container,
AdtId::UnionId(it) => it.module(db), AdtId::UnionId(it) => it.lookup(db).container,
AdtId::EnumId(it) => it.module(db), AdtId::EnumId(it) => it.lookup(db).container,
} }
.module(db)
} }
} }
@ -509,58 +391,22 @@ impl HasModule for DefWithBodyId {
} }
} }
impl HasModule for GenericDefId {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
match self {
GenericDefId::FunctionId(it) => it.lookup(db).module(db),
GenericDefId::AdtId(it) => it.module(db),
GenericDefId::TraitId(it) => it.lookup(db).container.module(db),
GenericDefId::TypeAliasId(it) => it.lookup(db).module(db),
GenericDefId::ImplId(it) => it.lookup(db).container.module(db),
GenericDefId::EnumVariantId(it) => it.parent.lookup(db).container.module(db),
GenericDefId::ConstId(it) => it.lookup(db).module(db),
}
}
}
impl HasModule for StaticLoc { impl HasModule for StaticLoc {
fn module(&self, _db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &impl db::DefDatabase) -> ModuleId {
self.container self.container.module(db)
} }
} }
pub trait HasSource {
type Value;
fn source(&self, db: &impl db::DefDatabase) -> Source<Self::Value>;
}
impl HasSource for FunctionLoc {
type Value = ast::FnDef;
fn source(&self, db: &impl db::DefDatabase) -> Source<ast::FnDef> {
let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node)
}
}
impl HasSource for TypeAliasLoc {
type Value = ast::TypeAliasDef;
fn source(&self, db: &impl db::DefDatabase) -> Source<ast::TypeAliasDef> {
let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node)
}
}
impl HasSource for ConstLoc {
type Value = ast::ConstDef;
fn source(&self, db: &impl db::DefDatabase) -> Source<ast::ConstDef> {
let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node)
}
}
impl HasSource for StaticLoc {
type Value = ast::StaticDef;
fn source(&self, db: &impl db::DefDatabase) -> Source<ast::StaticDef> {
let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node)
}
}
pub trait HasChildSource {
type ChildId;
type Value;
fn child_source(
&self,
db: &impl db::DefDatabase,
) -> Source<ArenaMap<Self::ChildId, Self::Value>>;
}

View file

@ -5,6 +5,7 @@ test_utils::marks!(
name_res_works_for_broken_modules name_res_works_for_broken_modules
can_import_enum_variant can_import_enum_variant
glob_enum glob_enum
glob_enum_group
glob_across_crates glob_across_crates
std_prelude std_prelude
macro_rules_from_other_crates_are_visible_with_macro_use macro_rules_from_other_crates_are_visible_with_macro_use

View file

@ -57,24 +57,23 @@ mod tests;
use std::sync::Arc; use std::sync::Arc;
use hir_expand::{ use hir_expand::{diagnostics::DiagnosticSink, name::Name, InFile};
ast_id_map::FileAstId, diagnostics::DiagnosticSink, either::Either, name::Name, MacroDefId,
Source,
};
use once_cell::sync::Lazy;
use ra_arena::Arena; use ra_arena::Arena;
use ra_db::{CrateId, Edition, FileId}; use ra_db::{CrateId, Edition, FileId, FilePosition};
use ra_prof::profile; use ra_prof::profile;
use ra_syntax::ast; use ra_syntax::{
ast::{self, AstNode},
SyntaxNode,
};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::{ use crate::{
builtin_type::BuiltinType,
db::DefDatabase, db::DefDatabase,
item_scope::{BuiltinShadowMode, ItemScope},
nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode}, nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode},
path::Path, path::ModPath,
per_ns::PerNs, per_ns::PerNs,
AstId, FunctionId, ImplId, LocalImportId, LocalModuleId, ModuleDefId, ModuleId, TraitId, AstId, LocalModuleId, ModuleDefId, ModuleId,
}; };
/// Contains all top-level defs from a macro-expanded crate /// Contains all top-level defs from a macro-expanded crate
@ -100,106 +99,76 @@ impl std::ops::Index<LocalModuleId> for CrateDefMap {
} }
} }
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum ModuleOrigin {
CrateRoot {
definition: FileId,
},
/// Note that non-inline modules, by definition, live inside non-macro file.
File {
declaration: AstId<ast::Module>,
definition: FileId,
},
Inline {
definition: AstId<ast::Module>,
},
}
impl Default for ModuleOrigin {
fn default() -> Self {
ModuleOrigin::CrateRoot { definition: FileId(0) }
}
}
impl ModuleOrigin {
pub(crate) fn not_sure_file(file: Option<FileId>, declaration: AstId<ast::Module>) -> Self {
match file {
None => ModuleOrigin::Inline { definition: declaration },
Some(definition) => ModuleOrigin::File { declaration, definition },
}
}
fn declaration(&self) -> Option<AstId<ast::Module>> {
match self {
ModuleOrigin::File { declaration: module, .. }
| ModuleOrigin::Inline { definition: module, .. } => Some(*module),
ModuleOrigin::CrateRoot { .. } => None,
}
}
pub fn file_id(&self) -> Option<FileId> {
match self {
ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
Some(*definition)
}
_ => None,
}
}
/// Returns a node which defines this module.
/// That is, a file or a `mod foo {}` with items.
fn definition_source(&self, db: &impl DefDatabase) -> InFile<ModuleSource> {
match self {
ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
let file_id = *definition;
let sf = db.parse(file_id).tree();
return InFile::new(file_id.into(), ModuleSource::SourceFile(sf));
}
ModuleOrigin::Inline { definition } => {
InFile::new(definition.file_id, ModuleSource::Module(definition.to_node(db)))
}
}
}
}
#[derive(Default, Debug, PartialEq, Eq)] #[derive(Default, Debug, PartialEq, Eq)]
pub struct ModuleData { pub struct ModuleData {
pub parent: Option<LocalModuleId>, pub parent: Option<LocalModuleId>,
pub children: FxHashMap<Name, LocalModuleId>, pub children: FxHashMap<Name, LocalModuleId>,
pub scope: ModuleScope, pub scope: ItemScope,
// FIXME: these can't be both null, we need a three-state enum here. /// Where does this module come from?
/// None for root pub origin: ModuleOrigin,
pub declaration: Option<AstId<ast::Module>>,
/// None for inline modules.
///
/// Note that non-inline modules, by definition, live inside non-macro file.
pub definition: Option<FileId>,
pub impls: Vec<ImplId>,
}
#[derive(Default, Debug, PartialEq, Eq)]
pub(crate) struct Declarations {
fns: FxHashMap<FileAstId<ast::FnDef>, FunctionId>,
}
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ModuleScope {
items: FxHashMap<Name, Resolution>,
/// Macros visable in current module in legacy textual scope
///
/// For macros invoked by an unquatified identifier like `bar!()`, `legacy_macros` will be searched in first.
/// If it yields no result, then it turns to module scoped `macros`.
/// It macros with name quatified with a path like `crate::foo::bar!()`, `legacy_macros` will be skipped,
/// and only normal scoped `macros` will be searched in.
///
/// Note that this automatically inherit macros defined textually before the definition of module itself.
///
/// Module scoped macros will be inserted into `items` instead of here.
// FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will
// be all resolved to the last one defined if shadowing happens.
legacy_macros: FxHashMap<Name, MacroDefId>,
}
static BUILTIN_SCOPE: Lazy<FxHashMap<Name, Resolution>> = Lazy::new(|| {
BuiltinType::ALL
.iter()
.map(|(name, ty)| {
(name.clone(), Resolution { def: PerNs::types(ty.clone().into()), import: None })
})
.collect()
});
/// Legacy macros can only be accessed through special methods like `get_legacy_macros`.
/// Other methods will only resolve values, types and module scoped macros only.
impl ModuleScope {
pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, &'a Resolution)> + 'a {
//FIXME: shadowing
self.items.iter().chain(BUILTIN_SCOPE.iter())
}
pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
self.entries()
.filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None })
.flat_map(|per_ns| {
per_ns.take_types().into_iter().chain(per_ns.take_values().into_iter())
})
}
/// Iterate over all module scoped macros
pub fn macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a {
self.items
.iter()
.filter_map(|(name, res)| res.def.take_macros().map(|macro_| (name, macro_)))
}
/// Iterate over all legacy textual scoped macros visable at the end of the module
pub fn legacy_macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a {
self.legacy_macros.iter().map(|(name, def)| (name, *def))
}
/// Get a name from current module scope, legacy macros are not included
pub fn get(&self, name: &Name) -> Option<&Resolution> {
self.items.get(name).or_else(|| BUILTIN_SCOPE.get(name))
}
pub fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a {
self.items.values().filter_map(|r| match r.def.take_types() {
Some(ModuleDefId::TraitId(t)) => Some(t),
_ => None,
})
}
fn get_legacy_macro(&self, name: &Name) -> Option<MacroDefId> {
self.legacy_macros.get(name).copied()
}
}
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct Resolution {
/// None for unresolved
pub def: PerNs,
/// ident by which this is imported into local scope.
pub import: Option<LocalImportId>,
} }
impl CrateDefMap { impl CrateDefMap {
@ -241,7 +210,7 @@ impl CrateDefMap {
pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ { pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
self.modules self.modules
.iter() .iter()
.filter(move |(_id, data)| data.definition == Some(file_id)) .filter(move |(_id, data)| data.origin.file_id() == Some(file_id))
.map(|(id, _data)| id) .map(|(id, _data)| id)
} }
@ -249,33 +218,62 @@ impl CrateDefMap {
&self, &self,
db: &impl DefDatabase, db: &impl DefDatabase,
original_module: LocalModuleId, original_module: LocalModuleId,
path: &Path, path: &ModPath,
shadow: BuiltinShadowMode,
) -> (PerNs, Option<usize>) { ) -> (PerNs, Option<usize>) {
let res = self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path); let res =
self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow);
(res.resolved_def, res.segment_index) (res.resolved_def, res.segment_index)
} }
} }
impl ModuleData { impl ModuleData {
/// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
pub fn definition_source( pub fn definition_source(&self, db: &impl DefDatabase) -> InFile<ModuleSource> {
&self, self.origin.definition_source(db)
db: &impl DefDatabase,
) -> Source<Either<ast::SourceFile, ast::Module>> {
if let Some(file_id) = self.definition {
let sf = db.parse(file_id).tree();
return Source::new(file_id.into(), Either::A(sf));
}
let decl = self.declaration.unwrap();
Source::new(decl.file_id(), Either::B(decl.to_node(db)))
} }
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
/// `None` for the crate root. /// `None` for the crate root or block.
pub fn declaration_source(&self, db: &impl DefDatabase) -> Option<Source<ast::Module>> { pub fn declaration_source(&self, db: &impl DefDatabase) -> Option<InFile<ast::Module>> {
let decl = self.declaration?; let decl = self.origin.declaration()?;
let value = decl.to_node(db); let value = decl.to_node(db);
Some(Source { file_id: decl.file_id(), value }) Some(InFile { file_id: decl.file_id, value })
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ModuleSource {
SourceFile(ast::SourceFile),
Module(ast::Module),
}
impl ModuleSource {
// FIXME: this methods do not belong here
pub fn from_position(db: &impl DefDatabase, position: FilePosition) -> ModuleSource {
let parse = db.parse(position.file_id);
match &ra_syntax::algo::find_node_at_offset::<ast::Module>(
parse.tree().syntax(),
position.offset,
) {
Some(m) if !m.has_semi() => ModuleSource::Module(m.clone()),
_ => {
let source_file = parse.tree();
ModuleSource::SourceFile(source_file)
}
}
}
pub fn from_child_node(db: &impl DefDatabase, child: InFile<&SyntaxNode>) -> ModuleSource {
if let Some(m) =
child.value.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi())
{
ModuleSource::Module(m)
} else {
let file_id = child.file_id.original_file(db);
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
} }
} }
@ -309,7 +307,7 @@ mod diagnostics {
} }
let decl = declaration.to_node(db); let decl = declaration.to_node(db);
sink.push(UnresolvedModule { sink.push(UnresolvedModule {
file: declaration.file_id(), file: declaration.file_id,
decl: AstPtr::new(&decl), decl: AstPtr::new(&decl),
candidate: candidate.clone(), candidate: candidate.clone(),
}) })

View file

@ -4,14 +4,15 @@
//! resolves imports and expands macros. //! resolves imports and expands macros.
use hir_expand::{ use hir_expand::{
builtin_derive::find_builtin_derive,
builtin_macro::find_builtin_macro, builtin_macro::find_builtin_macro,
name::{self, AsName, Name}, name::{name, AsName, Name},
HirFileId, MacroCallId, MacroDefId, MacroDefKind, MacroFileKind, HirFileId, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
}; };
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_db::{CrateId, FileId}; use ra_db::{CrateId, FileId};
use ra_syntax::ast; use ra_syntax::ast;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::FxHashMap;
use test_utils::tested_by; use test_utils::tested_by;
use crate::{ use crate::{
@ -19,13 +20,12 @@ use crate::{
db::DefDatabase, db::DefDatabase,
nameres::{ nameres::{
diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint,
raw, CrateDefMap, ModuleData, Resolution, ResolveMode, raw, BuiltinShadowMode, CrateDefMap, ModuleData, ModuleOrigin, ResolveMode,
}, },
path::{Path, PathKind}, path::{ModPath, PathKind},
per_ns::PerNs, per_ns::PerNs,
AdtId, AstId, AstItemDef, ConstLoc, ContainerId, EnumId, EnumVariantId, FunctionLoc, ImplId, AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern,
Intern, LocalImportId, LocalModuleId, LocationCtx, ModuleDefId, ModuleId, StaticLoc, StructId, LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
TraitId, TypeAliasLoc, UnionId,
}; };
pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap { pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
@ -57,68 +57,63 @@ pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> C
def_map, def_map,
glob_imports: FxHashMap::default(), glob_imports: FxHashMap::default(),
unresolved_imports: Vec::new(), unresolved_imports: Vec::new(),
resolved_imports: Vec::new(),
unexpanded_macros: Vec::new(), unexpanded_macros: Vec::new(),
unexpanded_attribute_macros: Vec::new(),
mod_dirs: FxHashMap::default(), mod_dirs: FxHashMap::default(),
macro_stack_monitor: MacroStackMonitor::default(),
poison_macros: FxHashSet::default(),
cfg_options, cfg_options,
}; };
collector.collect(); collector.collect();
collector.finish() collector.finish()
} }
#[derive(Default)] #[derive(Copy, Clone, Debug, Eq, PartialEq)]
struct MacroStackMonitor { enum PartialResolvedImport {
counts: FxHashMap<MacroDefId, u32>, /// None of any namespaces is resolved
Unresolved,
/// Mainly use for test /// One of namespaces is resolved
validator: Option<Box<dyn Fn(u32) -> bool>>, Indeterminate(PerNs),
/// All namespaces are resolved, OR it is came from other crate
Resolved(PerNs),
} }
impl MacroStackMonitor { impl PartialResolvedImport {
fn increase(&mut self, macro_def_id: MacroDefId) { fn namespaces(&self) -> PerNs {
*self.counts.entry(macro_def_id).or_default() += 1; match self {
} PartialResolvedImport::Unresolved => PerNs::none(),
PartialResolvedImport::Indeterminate(ns) => *ns,
fn decrease(&mut self, macro_def_id: MacroDefId) { PartialResolvedImport::Resolved(ns) => *ns,
*self.counts.entry(macro_def_id).or_default() -= 1;
}
fn is_poison(&self, macro_def_id: MacroDefId) -> bool {
let cur = *self.counts.get(&macro_def_id).unwrap_or(&0);
if let Some(validator) = &self.validator {
validator(cur)
} else {
cur > 100
} }
} }
} }
#[derive(Clone, Debug, Eq, PartialEq)]
struct ImportDirective {
module_id: LocalModuleId,
import_id: raw::Import,
import: raw::ImportData,
status: PartialResolvedImport,
}
#[derive(Clone, Debug, Eq, PartialEq)]
struct MacroDirective {
module_id: LocalModuleId,
ast_id: AstId<ast::MacroCall>,
path: ModPath,
legacy: Option<MacroCallId>,
}
/// Walks the tree of module recursively /// Walks the tree of module recursively
struct DefCollector<'a, DB> { struct DefCollector<'a, DB> {
db: &'a DB, db: &'a DB,
def_map: CrateDefMap, def_map: CrateDefMap,
glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, LocalImportId)>>, glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, raw::Import)>>,
unresolved_imports: Vec<(LocalModuleId, LocalImportId, raw::ImportData)>, unresolved_imports: Vec<ImportDirective>,
unexpanded_macros: Vec<(LocalModuleId, AstId<ast::MacroCall>, Path)>, resolved_imports: Vec<ImportDirective>,
unexpanded_macros: Vec<MacroDirective>,
unexpanded_attribute_macros: Vec<(LocalModuleId, AstId<ast::ModuleItem>, ModPath)>,
mod_dirs: FxHashMap<LocalModuleId, ModDir>, mod_dirs: FxHashMap<LocalModuleId, ModDir>,
/// Some macro use `$tt:tt which mean we have to handle the macro perfectly
/// To prevent stack overflow, we add a deep counter here for prevent that.
macro_stack_monitor: MacroStackMonitor,
/// Some macros are not well-behavior, which leads to infinite loop
/// e.g. macro_rules! foo { ($ty:ty) => { foo!($ty); } }
/// We mark it down and skip it in collector
///
/// FIXME:
/// Right now it only handle a poison macro in a single crate,
/// such that if other crate try to call that macro,
/// the whole process will do again until it became poisoned in that crate.
/// We should handle this macro set globally
/// However, do we want to put it as a global variable?
poison_macros: FxHashSet<MacroDefId>,
cfg_options: &'a CfgOptions, cfg_options: &'a CfgOptions,
} }
@ -131,7 +126,7 @@ where
let file_id = crate_graph.crate_root(self.def_map.krate); let file_id = crate_graph.crate_root(self.def_map.krate);
let raw_items = self.db.raw_items(file_id.into()); let raw_items = self.db.raw_items(file_id.into());
let module_id = self.def_map.root; let module_id = self.def_map.root;
self.def_map.modules[module_id].definition = Some(file_id); self.def_map.modules[module_id].origin = ModuleOrigin::CrateRoot { definition: file_id };
ModCollector { ModCollector {
def_collector: &mut *self, def_collector: &mut *self,
module_id, module_id,
@ -145,9 +140,11 @@ where
let mut i = 0; let mut i = 0;
loop { loop {
self.db.check_canceled(); self.db.check_canceled();
match (self.resolve_imports(), self.resolve_macros()) { self.resolve_imports();
(ReachedFixedPoint::Yes, ReachedFixedPoint::Yes) => break,
_ => i += 1, match self.resolve_macros() {
ReachedFixedPoint::Yes => break,
ReachedFixedPoint::No => i += 1,
} }
if i == 1000 { if i == 1000 {
log::error!("name resolution is stuck"); log::error!("name resolution is stuck");
@ -155,10 +152,26 @@ where
} }
} }
// Resolve all indeterminate resolved imports again
// As some of the macros will expand newly import shadowing partial resolved imports
// FIXME: We maybe could skip this, if we handle the Indetermine imports in `resolve_imports`
// correctly
let partial_resolved = self.resolved_imports.iter().filter_map(|directive| {
if let PartialResolvedImport::Indeterminate(_) = directive.status {
let mut directive = directive.clone();
directive.status = PartialResolvedImport::Unresolved;
Some(directive)
} else {
None
}
});
self.unresolved_imports.extend(partial_resolved);
self.resolve_imports();
let unresolved_imports = std::mem::replace(&mut self.unresolved_imports, Vec::new()); let unresolved_imports = std::mem::replace(&mut self.unresolved_imports, Vec::new());
// show unresolved imports in completion, etc // show unresolved imports in completion, etc
for (module_id, import, import_data) in unresolved_imports { for directive in unresolved_imports {
self.record_resolved_import(module_id, PerNs::none(), import, &import_data) self.record_resolved_import(&directive)
} }
} }
@ -201,24 +214,20 @@ where
// In Rust, `#[macro_export]` macros are unconditionally visible at the // In Rust, `#[macro_export]` macros are unconditionally visible at the
// crate root, even if the parent modules is **not** visible. // crate root, even if the parent modules is **not** visible.
if export { if export {
self.update( self.update(self.def_map.root, &[(name, PerNs::macros(macro_))]);
self.def_map.root,
None,
&[(name, Resolution { def: PerNs::macros(macro_), import: None })],
);
} }
} }
/// Define a legacy textual scoped macro in module /// Define a legacy textual scoped macro in module
/// ///
/// We use a map `legacy_macros` to store all legacy textual scoped macros visable per module. /// We use a map `legacy_macros` to store all legacy textual scoped macros visible per module.
/// It will clone all macros from parent legacy scope, whose definition is prior to /// It will clone all macros from parent legacy scope, whose definition is prior to
/// the definition of current module. /// the definition of current module.
/// And also, `macro_use` on a module will import all legacy macros visable inside to /// And also, `macro_use` on a module will import all legacy macros visible inside to
/// current legacy scope, with possible shadowing. /// current legacy scope, with possible shadowing.
fn define_legacy_macro(&mut self, module_id: LocalModuleId, name: Name, macro_: MacroDefId) { fn define_legacy_macro(&mut self, module_id: LocalModuleId, name: Name, mac: MacroDefId) {
// Always shadowing // Always shadowing
self.def_map.modules[module_id].scope.legacy_macros.insert(name, macro_); self.def_map.modules[module_id].scope.define_legacy_macro(name, mac);
} }
/// Import macros from `#[macro_use] extern crate`. /// Import macros from `#[macro_use] extern crate`.
@ -259,31 +268,43 @@ where
} }
} }
fn resolve_imports(&mut self) -> ReachedFixedPoint { /// Import resolution
let mut imports = std::mem::replace(&mut self.unresolved_imports, Vec::new()); ///
let mut resolved = Vec::new(); /// This is a fix point algorithm. We resolve imports until no forward
imports.retain(|(module_id, import, import_data)| { /// progress in resolving imports is made
let (def, fp) = self.resolve_import(*module_id, import_data); fn resolve_imports(&mut self) {
if fp == ReachedFixedPoint::Yes { let mut n_previous_unresolved = self.unresolved_imports.len() + 1;
resolved.push((*module_id, def, *import, import_data.clone()))
while self.unresolved_imports.len() < n_previous_unresolved {
n_previous_unresolved = self.unresolved_imports.len();
let imports = std::mem::replace(&mut self.unresolved_imports, Vec::new());
for mut directive in imports {
directive.status = self.resolve_import(directive.module_id, &directive.import);
match directive.status {
PartialResolvedImport::Indeterminate(_) => {
self.record_resolved_import(&directive);
// FIXME: For avoid performance regression,
// we consider an imported resolved if it is indeterminate (i.e not all namespace resolved)
self.resolved_imports.push(directive)
}
PartialResolvedImport::Resolved(_) => {
self.record_resolved_import(&directive);
self.resolved_imports.push(directive)
}
PartialResolvedImport::Unresolved => {
self.unresolved_imports.push(directive);
}
}
} }
fp == ReachedFixedPoint::No
});
self.unresolved_imports = imports;
// Resolves imports, filling-in module scopes
let result =
if resolved.is_empty() { ReachedFixedPoint::Yes } else { ReachedFixedPoint::No };
for (module_id, def, import, import_data) in resolved {
self.record_resolved_import(module_id, def, import, &import_data)
} }
result
} }
fn resolve_import( fn resolve_import(
&self, &self,
module_id: LocalModuleId, module_id: LocalModuleId,
import: &raw::ImportData, import: &raw::ImportData,
) -> (PerNs, ReachedFixedPoint) { ) -> PartialResolvedImport {
log::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition); log::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition);
if import.is_extern_crate { if import.is_extern_crate {
let res = self.def_map.resolve_name_in_extern_prelude( let res = self.def_map.resolve_name_in_extern_prelude(
@ -292,26 +313,45 @@ where
.as_ident() .as_ident()
.expect("extern crate should have been desugared to one-element path"), .expect("extern crate should have been desugared to one-element path"),
); );
(res, ReachedFixedPoint::Yes) PartialResolvedImport::Resolved(res)
} else { } else {
let res = self.def_map.resolve_path_fp_with_macro( let res = self.def_map.resolve_path_fp_with_macro(
self.db, self.db,
ResolveMode::Import, ResolveMode::Import,
module_id, module_id,
&import.path, &import.path,
BuiltinShadowMode::Module,
); );
(res.resolved_def, res.reached_fixedpoint) let def = res.resolved_def;
if res.reached_fixedpoint == ReachedFixedPoint::No {
return PartialResolvedImport::Unresolved;
}
if let Some(krate) = res.krate {
if krate != self.def_map.krate {
return PartialResolvedImport::Resolved(def);
}
}
// Check whether all namespace is resolved
if def.take_types().is_some()
&& def.take_values().is_some()
&& def.take_macros().is_some()
{
PartialResolvedImport::Resolved(def)
} else {
PartialResolvedImport::Indeterminate(def)
}
} }
} }
fn record_resolved_import( fn record_resolved_import(&mut self, directive: &ImportDirective) {
&mut self, let module_id = directive.module_id;
module_id: LocalModuleId, let import_id = directive.import_id;
def: PerNs, let import = &directive.import;
import_id: LocalImportId, let def = directive.status.namespaces();
import: &raw::ImportData,
) {
if import.is_glob { if import.is_glob {
log::debug!("glob import: {:?}", import); log::debug!("glob import: {:?}", import);
match def.take_types() { match def.take_types() {
@ -326,13 +366,9 @@ where
let scope = &item_map[m.local_id].scope; let scope = &item_map[m.local_id].scope;
// Module scoped macros is included // Module scoped macros is included
let items = scope let items = scope.collect_resolutions();
.items
.iter()
.map(|(name, res)| (name.clone(), res.clone()))
.collect::<Vec<_>>();
self.update(module_id, Some(import_id), &items); self.update(module_id, &items);
} else { } else {
// glob import from same crate => we do an initial // glob import from same crate => we do an initial
// import, and then need to propagate any further // import, and then need to propagate any further
@ -340,18 +376,14 @@ where
let scope = &self.def_map[m.local_id].scope; let scope = &self.def_map[m.local_id].scope;
// Module scoped macros is included // Module scoped macros is included
let items = scope let items = scope.collect_resolutions();
.items
.iter()
.map(|(name, res)| (name.clone(), res.clone()))
.collect::<Vec<_>>();
self.update(module_id, Some(import_id), &items); self.update(module_id, &items);
// record the glob import in case we add further items // record the glob import in case we add further items
self.glob_imports let glob = self.glob_imports.entry(m.local_id).or_default();
.entry(m.local_id) if !glob.iter().any(|it| *it == (module_id, import_id)) {
.or_default() glob.push((module_id, import_id));
.push((module_id, import_id)); }
} }
} }
Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => { Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => {
@ -361,17 +393,14 @@ where
let resolutions = enum_data let resolutions = enum_data
.variants .variants
.iter() .iter()
.filter_map(|(local_id, variant_data)| { .map(|(local_id, variant_data)| {
let name = variant_data.name.clone(); let name = variant_data.name.clone();
let variant = EnumVariantId { parent: e, local_id }; let variant = EnumVariantId { parent: e, local_id };
let res = Resolution { let res = PerNs::both(variant.into(), variant.into());
def: PerNs::both(variant.into(), variant.into()), (name, res)
import: Some(import_id),
};
Some((name, res))
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
self.update(module_id, Some(import_id), &resolutions); self.update(module_id, &resolutions);
} }
Some(d) => { Some(d) => {
log::debug!("glob import {:?} from non-module/enum {:?}", import, d); log::debug!("glob import {:?} from non-module/enum {:?}", import, d);
@ -383,7 +412,7 @@ where
} else { } else {
match import.path.segments.last() { match import.path.segments.last() {
Some(last_segment) => { Some(last_segment) => {
let name = import.alias.clone().unwrap_or_else(|| last_segment.name.clone()); let name = import.alias.clone().unwrap_or_else(|| last_segment.clone());
log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
// extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
@ -393,62 +422,31 @@ where
} }
} }
let resolution = Resolution { def, import: Some(import_id) }; self.update(module_id, &[(name, def)]);
self.update(module_id, Some(import_id), &[(name, resolution)]);
} }
None => tested_by!(bogus_paths), None => tested_by!(bogus_paths),
} }
} }
} }
fn update( fn update(&mut self, module_id: LocalModuleId, resolutions: &[(Name, PerNs)]) {
&mut self, self.update_recursive(module_id, resolutions, 0)
module_id: LocalModuleId,
import: Option<LocalImportId>,
resolutions: &[(Name, Resolution)],
) {
self.update_recursive(module_id, import, resolutions, 0)
} }
fn update_recursive( fn update_recursive(
&mut self, &mut self,
module_id: LocalModuleId, module_id: LocalModuleId,
import: Option<LocalImportId>, resolutions: &[(Name, PerNs)],
resolutions: &[(Name, Resolution)],
depth: usize, depth: usize,
) { ) {
if depth > 100 { if depth > 100 {
// prevent stack overflows (but this shouldn't be possible) // prevent stack overflows (but this shouldn't be possible)
panic!("infinite recursion in glob imports!"); panic!("infinite recursion in glob imports!");
} }
let module_items = &mut self.def_map.modules[module_id].scope; let scope = &mut self.def_map.modules[module_id].scope;
let mut changed = false; let mut changed = false;
for (name, res) in resolutions { for (name, res) in resolutions {
let existing = module_items.items.entry(name.clone()).or_default(); changed |= scope.push_res(name.clone(), *res);
if existing.def.types.is_none() && res.def.types.is_some() {
existing.def.types = res.def.types;
existing.import = import.or(res.import);
changed = true;
}
if existing.def.values.is_none() && res.def.values.is_some() {
existing.def.values = res.def.values;
existing.import = import.or(res.import);
changed = true;
}
if existing.def.macros.is_none() && res.def.macros.is_some() {
existing.def.macros = res.def.macros;
existing.import = import.or(res.import);
changed = true;
}
if existing.def.is_none()
&& res.def.is_none()
&& existing.import.is_none()
&& res.import.is_some()
{
existing.import = res.import;
}
} }
if !changed { if !changed {
@ -461,27 +459,48 @@ where
.flat_map(|v| v.iter()) .flat_map(|v| v.iter())
.cloned() .cloned()
.collect::<Vec<_>>(); .collect::<Vec<_>>();
for (glob_importing_module, glob_import) in glob_imports { for (glob_importing_module, _glob_import) in glob_imports {
// We pass the glob import so that the tracked import in those modules is that glob import // We pass the glob import so that the tracked import in those modules is that glob import
self.update_recursive(glob_importing_module, Some(glob_import), resolutions, depth + 1); self.update_recursive(glob_importing_module, resolutions, depth + 1);
} }
} }
fn resolve_macros(&mut self) -> ReachedFixedPoint { fn resolve_macros(&mut self) -> ReachedFixedPoint {
let mut macros = std::mem::replace(&mut self.unexpanded_macros, Vec::new()); let mut macros = std::mem::replace(&mut self.unexpanded_macros, Vec::new());
let mut attribute_macros =
std::mem::replace(&mut self.unexpanded_attribute_macros, Vec::new());
let mut resolved = Vec::new(); let mut resolved = Vec::new();
let mut res = ReachedFixedPoint::Yes; let mut res = ReachedFixedPoint::Yes;
macros.retain(|(module_id, ast_id, path)| { macros.retain(|directive| {
if let Some(call_id) = directive.legacy {
res = ReachedFixedPoint::No;
resolved.push((directive.module_id, call_id));
return false;
}
let resolved_res = self.def_map.resolve_path_fp_with_macro( let resolved_res = self.def_map.resolve_path_fp_with_macro(
self.db, self.db,
ResolveMode::Other, ResolveMode::Other,
*module_id, directive.module_id,
path, &directive.path,
BuiltinShadowMode::Module,
); );
if let Some(def) = resolved_res.resolved_def.take_macros() { if let Some(def) = resolved_res.resolved_def.take_macros() {
let call_id = def.as_call_id(self.db, *ast_id); let call_id = def.as_call_id(self.db, MacroCallKind::FnLike(directive.ast_id));
resolved.push((*module_id, call_id, def)); resolved.push((directive.module_id, call_id));
res = ReachedFixedPoint::No;
return false;
}
true
});
attribute_macros.retain(|(module_id, ast_id, path)| {
let resolved_res = self.resolve_attribute_macro(path);
if let Some(def) = resolved_res {
let call_id = def.as_call_id(self.db, MacroCallKind::Attr(*ast_id));
resolved.push((*module_id, call_id));
res = ReachedFixedPoint::No; res = ReachedFixedPoint::No;
return false; return false;
} }
@ -490,44 +509,41 @@ where
}); });
self.unexpanded_macros = macros; self.unexpanded_macros = macros;
self.unexpanded_attribute_macros = attribute_macros;
for (module_id, macro_call_id, macro_def_id) in resolved { for (module_id, macro_call_id) in resolved {
self.collect_macro_expansion(module_id, macro_call_id, macro_def_id); self.collect_macro_expansion(module_id, macro_call_id);
} }
res res
} }
fn collect_macro_expansion( fn resolve_attribute_macro(&self, path: &ModPath) -> Option<MacroDefId> {
&mut self, // FIXME this is currently super hacky, just enough to support the
module_id: LocalModuleId, // built-in derives
macro_call_id: MacroCallId, if let Some(name) = path.as_ident() {
macro_def_id: MacroDefId, // FIXME this should actually be handled with the normal name
) { // resolution; the std lib defines built-in stubs for the derives,
if self.poison_macros.contains(&macro_def_id) { // but these are new-style `macro`s, which we don't support yet
return; if let Some(def_id) = find_builtin_derive(name) {
} return Some(def_id);
self.macro_stack_monitor.increase(macro_def_id);
if !self.macro_stack_monitor.is_poison(macro_def_id) {
let file_id: HirFileId = macro_call_id.as_file(MacroFileKind::Items);
let raw_items = self.db.raw_items(file_id);
let mod_dir = self.mod_dirs[&module_id].clone();
ModCollector {
def_collector: &mut *self,
file_id,
module_id,
raw_items: &raw_items,
mod_dir,
} }
.collect(raw_items.items());
} else {
log::error!("Too deep macro expansion: {:?}", macro_call_id);
self.poison_macros.insert(macro_def_id);
} }
None
}
self.macro_stack_monitor.decrease(macro_def_id); fn collect_macro_expansion(&mut self, module_id: LocalModuleId, macro_call_id: MacroCallId) {
let file_id: HirFileId = macro_call_id.as_file();
let raw_items = self.db.raw_items(file_id);
let mod_dir = self.mod_dirs[&module_id].clone();
ModCollector {
def_collector: &mut *self,
file_id,
module_id,
raw_items: &raw_items,
mod_dir,
}
.collect(raw_items.items());
} }
fn finish(self) -> CrateDefMap { fn finish(self) -> CrateDefMap {
@ -581,20 +597,31 @@ where
raw::RawItemKind::Module(m) => { raw::RawItemKind::Module(m) => {
self.collect_module(&self.raw_items[m], &item.attrs) self.collect_module(&self.raw_items[m], &item.attrs)
} }
raw::RawItemKind::Import(import_id) => self raw::RawItemKind::Import(import_id) => {
.def_collector self.def_collector.unresolved_imports.push(ImportDirective {
.unresolved_imports module_id: self.module_id,
.push((self.module_id, import_id, self.raw_items[import_id].clone())), import_id,
raw::RawItemKind::Def(def) => self.define_def(&self.raw_items[def]), import: self.raw_items[import_id].clone(),
status: PartialResolvedImport::Unresolved,
})
}
raw::RawItemKind::Def(def) => {
self.define_def(&self.raw_items[def], &item.attrs)
}
raw::RawItemKind::Macro(mac) => self.collect_macro(&self.raw_items[mac]), raw::RawItemKind::Macro(mac) => self.collect_macro(&self.raw_items[mac]),
raw::RawItemKind::Impl(imp) => { raw::RawItemKind::Impl(imp) => {
let module = ModuleId { let module = ModuleId {
krate: self.def_collector.def_map.krate, krate: self.def_collector.def_map.krate,
local_id: self.module_id, local_id: self.module_id,
}; };
let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id); let container = ContainerId::ModuleId(module);
let imp_id = ImplId::from_ast_id(ctx, self.raw_items[imp].ast_id); let ast_id = self.raw_items[imp].ast_id;
self.def_collector.def_map.modules[self.module_id].impls.push(imp_id) let impl_id =
ImplLoc { container, ast_id: AstId::new(self.file_id, ast_id) }
.intern(self.def_collector.db);
self.def_collector.def_map.modules[self.module_id]
.scope
.define_impl(impl_id)
} }
} }
} }
@ -667,72 +694,91 @@ where
let modules = &mut self.def_collector.def_map.modules; let modules = &mut self.def_collector.def_map.modules;
let res = modules.alloc(ModuleData::default()); let res = modules.alloc(ModuleData::default());
modules[res].parent = Some(self.module_id); modules[res].parent = Some(self.module_id);
modules[res].declaration = Some(declaration); modules[res].origin = ModuleOrigin::not_sure_file(definition, declaration);
modules[res].definition = definition; for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() {
modules[res].scope.legacy_macros = modules[self.module_id].scope.legacy_macros.clone(); modules[res].scope.define_legacy_macro(name, mac)
}
modules[self.module_id].children.insert(name.clone(), res); modules[self.module_id].children.insert(name.clone(), res);
let resolution = Resolution { let module = ModuleId { krate: self.def_collector.def_map.krate, local_id: res };
def: PerNs::types( let def: ModuleDefId = module.into();
ModuleId { krate: self.def_collector.def_map.krate, local_id: res }.into(), self.def_collector.def_map.modules[self.module_id].scope.define_def(def);
), self.def_collector.update(self.module_id, &[(name, def.into())]);
import: None,
};
self.def_collector.update(self.module_id, None, &[(name, resolution)]);
res res
} }
fn define_def(&mut self, def: &raw::DefData) { fn define_def(&mut self, def: &raw::DefData, attrs: &Attrs) {
let module = ModuleId { krate: self.def_collector.def_map.krate, local_id: self.module_id }; let module = ModuleId { krate: self.def_collector.def_map.krate, local_id: self.module_id };
let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id); // FIXME: check attrs to see if this is an attribute macro invocation;
// in which case we don't add the invocation, just a single attribute
// macro invocation
self.collect_derives(attrs, def);
let name = def.name.clone(); let name = def.name.clone();
let def: PerNs = match def.kind { let container = ContainerId::ModuleId(module);
raw::DefKind::Function(ast_id) => { let def: ModuleDefId = match def.kind {
let def = FunctionLoc { raw::DefKind::Function(ast_id) => FunctionLoc {
container: ContainerId::ModuleId(module), container: container.into(),
ast_id: AstId::new(self.file_id, ast_id), ast_id: AstId::new(self.file_id, ast_id),
}
.intern(self.def_collector.db);
PerNs::values(def.into())
} }
.intern(self.def_collector.db)
.into(),
raw::DefKind::Struct(ast_id) => { raw::DefKind::Struct(ast_id) => {
let id = StructId::from_ast_id(ctx, ast_id).into(); StructLoc { container, ast_id: AstId::new(self.file_id, ast_id) }
PerNs::both(id, id) .intern(self.def_collector.db)
.into()
} }
raw::DefKind::Union(ast_id) => { raw::DefKind::Union(ast_id) => {
let id = UnionId::from_ast_id(ctx, ast_id).into(); UnionLoc { container, ast_id: AstId::new(self.file_id, ast_id) }
PerNs::both(id, id) .intern(self.def_collector.db)
.into()
}
raw::DefKind::Enum(ast_id) => {
EnumLoc { container, ast_id: AstId::new(self.file_id, ast_id) }
.intern(self.def_collector.db)
.into()
} }
raw::DefKind::Enum(ast_id) => PerNs::types(EnumId::from_ast_id(ctx, ast_id).into()),
raw::DefKind::Const(ast_id) => { raw::DefKind::Const(ast_id) => {
let def = ConstLoc { ConstLoc { container: container.into(), ast_id: AstId::new(self.file_id, ast_id) }
container: ContainerId::ModuleId(module), .intern(self.def_collector.db)
ast_id: AstId::new(self.file_id, ast_id), .into()
}
.intern(self.def_collector.db);
PerNs::values(def.into())
} }
raw::DefKind::Static(ast_id) => { raw::DefKind::Static(ast_id) => {
let def = StaticLoc { container: module, ast_id: AstId::new(self.file_id, ast_id) } StaticLoc { container, ast_id: AstId::new(self.file_id, ast_id) }
.intern(self.def_collector.db); .intern(self.def_collector.db)
.into()
PerNs::values(def.into())
} }
raw::DefKind::Trait(ast_id) => PerNs::types(TraitId::from_ast_id(ctx, ast_id).into()), raw::DefKind::Trait(ast_id) => {
raw::DefKind::TypeAlias(ast_id) => { TraitLoc { container, ast_id: AstId::new(self.file_id, ast_id) }
let def = TypeAliasLoc { .intern(self.def_collector.db)
container: ContainerId::ModuleId(module), .into()
ast_id: AstId::new(self.file_id, ast_id),
}
.intern(self.def_collector.db);
PerNs::types(def.into())
} }
raw::DefKind::TypeAlias(ast_id) => TypeAliasLoc {
container: container.into(),
ast_id: AstId::new(self.file_id, ast_id),
}
.intern(self.def_collector.db)
.into(),
}; };
let resolution = Resolution { def, import: None }; self.def_collector.def_map.modules[self.module_id].scope.define_def(def);
self.def_collector.update(self.module_id, None, &[(name, resolution)]) self.def_collector.update(self.module_id, &[(name, def.into())])
}
fn collect_derives(&mut self, attrs: &Attrs, def: &raw::DefData) {
for derive_subtree in attrs.by_key("derive").tt_values() {
// for #[derive(Copy, Clone)], `derive_subtree` is the `(Copy, Clone)` subtree
for tt in &derive_subtree.token_trees {
let ident = match &tt {
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident,
tt::TokenTree::Leaf(tt::Leaf::Punct(_)) => continue, // , is ok
_ => continue, // anything else would be an error (which we currently ignore)
};
let path = ModPath::from_tt_ident(ident);
let ast_id = AstId::new(self.file_id, def.kind.ast_id());
self.def_collector.unexpanded_attribute_macros.push((self.module_id, ast_id, path));
}
}
} }
fn collect_macro(&mut self, mac: &raw::MacroData) { fn collect_macro(&mut self, mac: &raw::MacroData) {
@ -758,8 +804,8 @@ where
if is_macro_rules(&mac.path) { if is_macro_rules(&mac.path) {
if let Some(name) = &mac.name { if let Some(name) = &mac.name {
let macro_id = MacroDefId { let macro_id = MacroDefId {
ast_id, ast_id: Some(ast_id),
krate: self.def_collector.def_map.krate, krate: Some(self.def_collector.def_map.krate),
kind: MacroDefKind::Declarative, kind: MacroDefKind::Declarative,
}; };
self.def_collector.define_macro(self.module_id, name.clone(), macro_id, mac.export); self.def_collector.define_macro(self.module_id, name.clone(), macro_id, mac.export);
@ -767,14 +813,20 @@ where
return; return;
} }
// Case 2: try to resolve in legacy scope and expand macro_rules, triggering // Case 2: try to resolve in legacy scope and expand macro_rules
// recursive item collection.
if let Some(macro_def) = mac.path.as_ident().and_then(|name| { if let Some(macro_def) = mac.path.as_ident().and_then(|name| {
self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name) self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name)
}) { }) {
let macro_call_id = macro_def.as_call_id(self.def_collector.db, ast_id); let macro_call_id =
macro_def.as_call_id(self.def_collector.db, MacroCallKind::FnLike(ast_id));
self.def_collector.unexpanded_macros.push(MacroDirective {
module_id: self.module_id,
path: mac.path.clone(),
ast_id,
legacy: Some(macro_call_id),
});
self.def_collector.collect_macro_expansion(self.module_id, macro_call_id, macro_def);
return; return;
} }
@ -782,13 +834,19 @@ where
// We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only. // We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only.
let mut path = mac.path.clone(); let mut path = mac.path.clone();
if path.is_ident() { if path.is_ident() {
path.kind = PathKind::Self_; path.kind = PathKind::Super(0);
} }
self.def_collector.unexpanded_macros.push((self.module_id, ast_id, path));
self.def_collector.unexpanded_macros.push(MacroDirective {
module_id: self.module_id,
path,
ast_id,
legacy: None,
});
} }
fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) { fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) {
let macros = self.def_collector.def_map[module_id].scope.legacy_macros.clone(); let macros = self.def_collector.def_map[module_id].scope.collect_legacy_macros();
for (name, macro_) in macros { for (name, macro_) in macros {
self.def_collector.define_legacy_macro(self.module_id, name.clone(), macro_); self.def_collector.define_legacy_macro(self.module_id, name.clone(), macro_);
} }
@ -803,45 +861,35 @@ where
} }
} }
fn is_macro_rules(path: &Path) -> bool { fn is_macro_rules(path: &ModPath) -> bool {
path.as_ident() == Some(&name::MACRO_RULES) path.as_ident() == Some(&name![macro_rules])
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::{db::DefDatabase, test_db::TestDB};
use ra_arena::Arena; use ra_arena::Arena;
use ra_db::{fixture::WithFixture, SourceDatabase}; use ra_db::{fixture::WithFixture, SourceDatabase};
use rustc_hash::FxHashSet;
use crate::{db::DefDatabase, test_db::TestDB};
use super::*; use super::*;
fn do_collect_defs( fn do_collect_defs(db: &impl DefDatabase, def_map: CrateDefMap) -> CrateDefMap {
db: &impl DefDatabase,
def_map: CrateDefMap,
monitor: MacroStackMonitor,
) -> (CrateDefMap, FxHashSet<MacroDefId>) {
let mut collector = DefCollector { let mut collector = DefCollector {
db, db,
def_map, def_map,
glob_imports: FxHashMap::default(), glob_imports: FxHashMap::default(),
unresolved_imports: Vec::new(), unresolved_imports: Vec::new(),
resolved_imports: Vec::new(),
unexpanded_macros: Vec::new(), unexpanded_macros: Vec::new(),
unexpanded_attribute_macros: Vec::new(),
mod_dirs: FxHashMap::default(), mod_dirs: FxHashMap::default(),
macro_stack_monitor: monitor,
poison_macros: FxHashSet::default(),
cfg_options: &CfgOptions::default(), cfg_options: &CfgOptions::default(),
}; };
collector.collect(); collector.collect();
(collector.def_map, collector.poison_macros) collector.def_map
} }
fn do_limited_resolve( fn do_resolve(code: &str) -> CrateDefMap {
code: &str,
limit: u32,
poison_limit: u32,
) -> (CrateDefMap, FxHashSet<MacroDefId>) {
let (db, _file_id) = TestDB::with_single_file(&code); let (db, _file_id) = TestDB::with_single_file(&code);
let krate = db.test_crate(); let krate = db.test_crate();
@ -859,59 +907,18 @@ mod tests {
diagnostics: Vec::new(), diagnostics: Vec::new(),
} }
}; };
do_collect_defs(&db, def_map)
let mut monitor = MacroStackMonitor::default();
monitor.validator = Some(Box::new(move |count| {
assert!(count < limit);
count >= poison_limit
}));
do_collect_defs(&db, def_map, monitor)
} }
#[test] #[test]
fn test_macro_expand_limit_width() { fn test_macro_expand_will_stop() {
do_limited_resolve( do_resolve(
r#" r#"
macro_rules! foo { macro_rules! foo {
($($ty:ty)*) => { foo!($($ty)*, $($ty)*); } ($($ty:ty)*) => { foo!($($ty)*, $($ty)*); }
} }
foo!(KABOOM); foo!(KABOOM);
"#, "#,
16,
1000,
); );
} }
#[test]
fn test_macro_expand_poisoned() {
let (_, poison_macros) = do_limited_resolve(
r#"
macro_rules! foo {
($ty:ty) => { foo!($ty); }
}
foo!(KABOOM);
"#,
100,
16,
);
assert_eq!(poison_macros.len(), 1);
}
#[test]
fn test_macro_expand_normal() {
let (_, poison_macros) = do_limited_resolve(
r#"
macro_rules! foo {
($ident:ident) => { struct $ident {} }
}
foo!(Bar);
"#,
16,
16,
);
assert_eq!(poison_macros.len(), 0);
}
} }

View file

@ -10,16 +10,18 @@
//! //!
//! `ReachedFixedPoint` signals about this. //! `ReachedFixedPoint` signals about this.
use std::iter::successors;
use hir_expand::name::Name; use hir_expand::name::Name;
use ra_db::Edition; use ra_db::Edition;
use test_utils::tested_by; use test_utils::tested_by;
use crate::{ use crate::{
db::DefDatabase, db::DefDatabase,
nameres::CrateDefMap, nameres::{BuiltinShadowMode, CrateDefMap},
path::{Path, PathKind}, path::{ModPath, PathKind},
per_ns::PerNs, per_ns::PerNs,
AdtId, EnumVariantId, LocalModuleId, ModuleDefId, ModuleId, AdtId, CrateId, EnumVariantId, LocalModuleId, ModuleDefId, ModuleId,
}; };
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@ -39,19 +41,21 @@ pub(super) struct ResolvePathResult {
pub(super) resolved_def: PerNs, pub(super) resolved_def: PerNs,
pub(super) segment_index: Option<usize>, pub(super) segment_index: Option<usize>,
pub(super) reached_fixedpoint: ReachedFixedPoint, pub(super) reached_fixedpoint: ReachedFixedPoint,
pub(super) krate: Option<CrateId>,
} }
impl ResolvePathResult { impl ResolvePathResult {
fn empty(reached_fixedpoint: ReachedFixedPoint) -> ResolvePathResult { fn empty(reached_fixedpoint: ReachedFixedPoint) -> ResolvePathResult {
ResolvePathResult::with(PerNs::none(), reached_fixedpoint, None) ResolvePathResult::with(PerNs::none(), reached_fixedpoint, None, None)
} }
fn with( fn with(
resolved_def: PerNs, resolved_def: PerNs,
reached_fixedpoint: ReachedFixedPoint, reached_fixedpoint: ReachedFixedPoint,
segment_index: Option<usize>, segment_index: Option<usize>,
krate: Option<CrateId>,
) -> ResolvePathResult { ) -> ResolvePathResult {
ResolvePathResult { resolved_def, reached_fixedpoint, segment_index } ResolvePathResult { resolved_def, reached_fixedpoint, segment_index, krate }
} }
} }
@ -67,8 +71,18 @@ impl CrateDefMap {
db: &impl DefDatabase, db: &impl DefDatabase,
mode: ResolveMode, mode: ResolveMode,
original_module: LocalModuleId, original_module: LocalModuleId,
path: &Path, path: &ModPath,
shadow: BuiltinShadowMode,
) -> ResolvePathResult { ) -> ResolvePathResult {
// if it is not the last segment, we prefer the module to the builtin
let prefer_module = |index| {
if index == path.segments.len() - 1 {
shadow
} else {
BuiltinShadowMode::Module
}
};
let mut segments = path.segments.iter().enumerate(); let mut segments = path.segments.iter().enumerate();
let mut curr_per_ns: PerNs = match path.kind { let mut curr_per_ns: PerNs = match path.kind {
PathKind::DollarCrate(krate) => { PathKind::DollarCrate(krate) => {
@ -85,9 +99,6 @@ impl CrateDefMap {
PathKind::Crate => { PathKind::Crate => {
PerNs::types(ModuleId { krate: self.krate, local_id: self.root }.into()) PerNs::types(ModuleId { krate: self.krate, local_id: self.root }.into())
} }
PathKind::Self_ => {
PerNs::types(ModuleId { krate: self.krate, local_id: original_module }.into())
}
// plain import or absolute path in 2015: crate-relative with // plain import or absolute path in 2015: crate-relative with
// fallback to extern prelude (with the simplification in // fallback to extern prelude (with the simplification in
// rust-lang/rust#57745) // rust-lang/rust#57745)
@ -96,24 +107,26 @@ impl CrateDefMap {
if self.edition == Edition::Edition2015 if self.edition == Edition::Edition2015
&& (path.kind == PathKind::Abs || mode == ResolveMode::Import) => && (path.kind == PathKind::Abs || mode == ResolveMode::Import) =>
{ {
let segment = match segments.next() { let (idx, segment) = match segments.next() {
Some((_, segment)) => segment, Some((idx, segment)) => (idx, segment),
None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
}; };
log::debug!("resolving {:?} in crate root (+ extern prelude)", segment); log::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
self.resolve_name_in_crate_root_or_extern_prelude(&segment.name) self.resolve_name_in_crate_root_or_extern_prelude(&segment, prefer_module(idx))
} }
PathKind::Plain => { PathKind::Plain => {
let segment = match segments.next() { let (idx, segment) = match segments.next() {
Some((_, segment)) => segment, Some((idx, segment)) => (idx, segment),
None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
}; };
log::debug!("resolving {:?} in module", segment); log::debug!("resolving {:?} in module", segment);
self.resolve_name_in_module(db, original_module, &segment.name) self.resolve_name_in_module(db, original_module, &segment, prefer_module(idx))
} }
PathKind::Super => { PathKind::Super(lvl) => {
if let Some(p) = self.modules[original_module].parent { let m = successors(Some(original_module), |m| self.modules[*m].parent)
PerNs::types(ModuleId { krate: self.krate, local_id: p }.into()) .nth(lvl as usize);
if let Some(local_id) = m {
PerNs::types(ModuleId { krate: self.krate, local_id }.into())
} else { } else {
log::debug!("super path in root module"); log::debug!("super path in root module");
return ResolvePathResult::empty(ReachedFixedPoint::Yes); return ResolvePathResult::empty(ReachedFixedPoint::Yes);
@ -125,18 +138,13 @@ impl CrateDefMap {
Some((_, segment)) => segment, Some((_, segment)) => segment,
None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
}; };
if let Some(def) = self.extern_prelude.get(&segment.name) { if let Some(def) = self.extern_prelude.get(&segment) {
log::debug!("absolute path {:?} resolved to crate {:?}", path, def); log::debug!("absolute path {:?} resolved to crate {:?}", path, def);
PerNs::types(*def) PerNs::types(*def)
} else { } else {
return ResolvePathResult::empty(ReachedFixedPoint::No); // extern crate declarations can add to the extern prelude return ResolvePathResult::empty(ReachedFixedPoint::No); // extern crate declarations can add to the extern prelude
} }
} }
PathKind::Type(_) => {
// This is handled in `infer::infer_path_expr`
// The result returned here does not matter
return ResolvePathResult::empty(ReachedFixedPoint::Yes);
}
}; };
for (i, segment) in segments { for (i, segment) in segments {
@ -156,32 +164,29 @@ impl CrateDefMap {
curr_per_ns = match curr { curr_per_ns = match curr {
ModuleDefId::ModuleId(module) => { ModuleDefId::ModuleId(module) => {
if module.krate != self.krate { if module.krate != self.krate {
let path = let path = ModPath {
Path { segments: path.segments[i..].to_vec(), kind: PathKind::Self_ }; segments: path.segments[i..].to_vec(),
kind: PathKind::Super(0),
};
log::debug!("resolving {:?} in other crate", path); log::debug!("resolving {:?} in other crate", path);
let defp_map = db.crate_def_map(module.krate); let defp_map = db.crate_def_map(module.krate);
let (def, s) = defp_map.resolve_path(db, module.local_id, &path); let (def, s) = defp_map.resolve_path(db, module.local_id, &path, shadow);
return ResolvePathResult::with( return ResolvePathResult::with(
def, def,
ReachedFixedPoint::Yes, ReachedFixedPoint::Yes,
s.map(|s| s + i), s.map(|s| s + i),
Some(module.krate),
); );
} }
// Since it is a qualified path here, it should not contains legacy macros // Since it is a qualified path here, it should not contains legacy macros
match self[module.local_id].scope.get(&segment.name) { self[module.local_id].scope.get(&segment, prefer_module(i))
Some(res) => res.def,
_ => {
log::debug!("path segment {:?} not found", segment.name);
return ResolvePathResult::empty(ReachedFixedPoint::No);
}
}
} }
ModuleDefId::AdtId(AdtId::EnumId(e)) => { ModuleDefId::AdtId(AdtId::EnumId(e)) => {
// enum variant // enum variant
tested_by!(can_import_enum_variant); tested_by!(can_import_enum_variant);
let enum_data = db.enum_data(e); let enum_data = db.enum_data(e);
match enum_data.variant(&segment.name) { match enum_data.variant(&segment) {
Some(local_id) => { Some(local_id) => {
let variant = EnumVariantId { parent: e, local_id }; let variant = EnumVariantId { parent: e, local_id };
PerNs::both(variant.into(), variant.into()) PerNs::both(variant.into(), variant.into())
@ -191,6 +196,7 @@ impl CrateDefMap {
PerNs::types(e.into()), PerNs::types(e.into()),
ReachedFixedPoint::Yes, ReachedFixedPoint::Yes,
Some(i), Some(i),
Some(self.krate),
); );
} }
} }
@ -200,7 +206,7 @@ impl CrateDefMap {
// (`Struct::method`), or some other kind of associated item // (`Struct::method`), or some other kind of associated item
log::debug!( log::debug!(
"path segment {:?} resolved to non-module {:?}, but is not last", "path segment {:?} resolved to non-module {:?}, but is not last",
segment.name, segment,
curr, curr,
); );
@ -208,11 +214,13 @@ impl CrateDefMap {
PerNs::types(s), PerNs::types(s),
ReachedFixedPoint::Yes, ReachedFixedPoint::Yes,
Some(i), Some(i),
Some(self.krate),
); );
} }
}; };
} }
ResolvePathResult::with(curr_per_ns, ReachedFixedPoint::Yes, None)
ResolvePathResult::with(curr_per_ns, ReachedFixedPoint::Yes, None, Some(self.krate))
} }
fn resolve_name_in_module( fn resolve_name_in_module(
@ -220,6 +228,7 @@ impl CrateDefMap {
db: &impl DefDatabase, db: &impl DefDatabase,
module: LocalModuleId, module: LocalModuleId,
name: &Name, name: &Name,
shadow: BuiltinShadowMode,
) -> PerNs { ) -> PerNs {
// Resolve in: // Resolve in:
// - legacy scope of macro // - legacy scope of macro
@ -228,23 +237,31 @@ impl CrateDefMap {
// - std prelude // - std prelude
let from_legacy_macro = let from_legacy_macro =
self[module].scope.get_legacy_macro(name).map_or_else(PerNs::none, PerNs::macros); self[module].scope.get_legacy_macro(name).map_or_else(PerNs::none, PerNs::macros);
let from_scope = self[module].scope.get(name).map_or_else(PerNs::none, |res| res.def); let from_scope = self[module].scope.get(name, shadow);
let from_extern_prelude = let from_extern_prelude =
self.extern_prelude.get(name).map_or(PerNs::none(), |&it| PerNs::types(it)); self.extern_prelude.get(name).map_or(PerNs::none(), |&it| PerNs::types(it));
let from_prelude = self.resolve_in_prelude(db, name); let from_prelude = self.resolve_in_prelude(db, name, shadow);
from_legacy_macro.or(from_scope).or(from_extern_prelude).or(from_prelude) from_legacy_macro.or(from_scope).or(from_extern_prelude).or(from_prelude)
} }
fn resolve_name_in_crate_root_or_extern_prelude(&self, name: &Name) -> PerNs { fn resolve_name_in_crate_root_or_extern_prelude(
let from_crate_root = &self,
self[self.root].scope.get(name).map_or_else(PerNs::none, |res| res.def); name: &Name,
shadow: BuiltinShadowMode,
) -> PerNs {
let from_crate_root = self[self.root].scope.get(name, shadow);
let from_extern_prelude = self.resolve_name_in_extern_prelude(name); let from_extern_prelude = self.resolve_name_in_extern_prelude(name);
from_crate_root.or(from_extern_prelude) from_crate_root.or(from_extern_prelude)
} }
fn resolve_in_prelude(&self, db: &impl DefDatabase, name: &Name) -> PerNs { fn resolve_in_prelude(
&self,
db: &impl DefDatabase,
name: &Name,
shadow: BuiltinShadowMode,
) -> PerNs {
if let Some(prelude) = self.prelude { if let Some(prelude) = self.prelude {
let keep; let keep;
let def_map = if prelude.krate == self.krate { let def_map = if prelude.krate == self.krate {
@ -254,7 +271,7 @@ impl CrateDefMap {
keep = db.crate_def_map(prelude.krate); keep = db.crate_def_map(prelude.krate);
&keep &keep
}; };
def_map[prelude.local_id].scope.get(name).map_or_else(PerNs::none, |res| res.def) def_map[prelude.local_id].scope.get(name, shadow)
} else { } else {
PerNs::none() PerNs::none()
} }

View file

@ -10,21 +10,18 @@ use std::{ops::Index, sync::Arc};
use hir_expand::{ use hir_expand::{
ast_id_map::AstIdMap, ast_id_map::AstIdMap,
db::AstDatabase, db::AstDatabase,
either::Either,
hygiene::Hygiene, hygiene::Hygiene,
name::{AsName, Name}, name::{AsName, Name},
}; };
use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; use ra_arena::{impl_arena_id, Arena, RawId};
use ra_prof::profile;
use ra_syntax::{ use ra_syntax::{
ast::{self, AttrsOwner, NameOwner}, ast::{self, AttrsOwner, NameOwner},
AstNode, AstPtr, AstNode,
}; };
use test_utils::tested_by; use test_utils::tested_by;
use crate::{ use crate::{attr::Attrs, db::DefDatabase, path::ModPath, FileAstId, HirFileId, InFile};
attr::Attrs, db::DefDatabase, path::Path, trace::Trace, FileAstId, HirFileId, LocalImportId,
Source,
};
/// `RawItems` is a set of top-level items in a file (except for impls). /// `RawItems` is a set of top-level items in a file (except for impls).
/// ///
@ -33,7 +30,7 @@ use crate::{
#[derive(Debug, Default, PartialEq, Eq)] #[derive(Debug, Default, PartialEq, Eq)]
pub struct RawItems { pub struct RawItems {
modules: Arena<Module, ModuleData>, modules: Arena<Module, ModuleData>,
imports: Arena<LocalImportId, ImportData>, imports: Arena<Import, ImportData>,
defs: Arena<Def, DefData>, defs: Arena<Def, DefData>,
macros: Arena<Macro, MacroData>, macros: Arena<Macro, MacroData>,
impls: Arena<Impl, ImplData>, impls: Arena<Impl, ImplData>,
@ -41,35 +38,15 @@ pub struct RawItems {
items: Vec<RawItem>, items: Vec<RawItem>,
} }
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ImportSourceMap {
map: ArenaMap<LocalImportId, ImportSourcePtr>,
}
type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>;
impl ImportSourceMap {
pub fn get(&self, import: LocalImportId) -> ImportSourcePtr {
self.map[import].clone()
}
}
impl RawItems { impl RawItems {
pub(crate) fn raw_items_query( pub(crate) fn raw_items_query(
db: &(impl DefDatabase + AstDatabase), db: &(impl DefDatabase + AstDatabase),
file_id: HirFileId, file_id: HirFileId,
) -> Arc<RawItems> { ) -> Arc<RawItems> {
db.raw_items_with_source_map(file_id).0 let _p = profile("raw_items_query");
}
pub(crate) fn raw_items_with_source_map_query(
db: &(impl DefDatabase + AstDatabase),
file_id: HirFileId,
) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
let mut collector = RawItemsCollector { let mut collector = RawItemsCollector {
raw_items: RawItems::default(), raw_items: RawItems::default(),
source_ast_id_map: db.ast_id_map(file_id), source_ast_id_map: db.ast_id_map(file_id),
imports: Trace::new(),
file_id, file_id,
hygiene: Hygiene::new(db, file_id), hygiene: Hygiene::new(db, file_id),
}; };
@ -80,11 +57,8 @@ impl RawItems {
collector.process_module(None, item_list); collector.process_module(None, item_list);
} }
} }
let mut raw_items = collector.raw_items; let raw_items = collector.raw_items;
let (arena, map) = collector.imports.into_arena_and_map(); Arc::new(raw_items)
raw_items.imports = arena;
let source_map = ImportSourceMap { map };
(Arc::new(raw_items), Arc::new(source_map))
} }
pub(super) fn items(&self) -> &[RawItem] { pub(super) fn items(&self) -> &[RawItem] {
@ -99,9 +73,9 @@ impl Index<Module> for RawItems {
} }
} }
impl Index<LocalImportId> for RawItems { impl Index<Import> for RawItems {
type Output = ImportData; type Output = ImportData;
fn index(&self, idx: LocalImportId) -> &ImportData { fn index(&self, idx: Import) -> &ImportData {
&self.imports[idx] &self.imports[idx]
} }
} }
@ -136,7 +110,7 @@ pub(super) struct RawItem {
#[derive(Debug, PartialEq, Eq, Clone, Copy)] #[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(super) enum RawItemKind { pub(super) enum RawItemKind {
Module(Module), Module(Module),
Import(LocalImportId), Import(Import),
Def(Def), Def(Def),
Macro(Macro), Macro(Macro),
Impl(Impl), Impl(Impl),
@ -152,9 +126,13 @@ pub(super) enum ModuleData {
Definition { name: Name, ast_id: FileAstId<ast::Module>, items: Vec<RawItem> }, Definition { name: Name, ast_id: FileAstId<ast::Module>, items: Vec<RawItem> },
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct Import(RawId);
impl_arena_id!(Import);
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImportData { pub struct ImportData {
pub(super) path: Path, pub(super) path: ModPath,
pub(super) alias: Option<Name>, pub(super) alias: Option<Name>,
pub(super) is_glob: bool, pub(super) is_glob: bool,
pub(super) is_prelude: bool, pub(super) is_prelude: bool,
@ -184,6 +162,21 @@ pub(super) enum DefKind {
TypeAlias(FileAstId<ast::TypeAliasDef>), TypeAlias(FileAstId<ast::TypeAliasDef>),
} }
impl DefKind {
pub fn ast_id(&self) -> FileAstId<ast::ModuleItem> {
match self {
DefKind::Function(it) => it.upcast(),
DefKind::Struct(it) => it.upcast(),
DefKind::Union(it) => it.upcast(),
DefKind::Enum(it) => it.upcast(),
DefKind::Const(it) => it.upcast(),
DefKind::Static(it) => it.upcast(),
DefKind::Trait(it) => it.upcast(),
DefKind::TypeAlias(it) => it.upcast(),
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(super) struct Macro(RawId); pub(super) struct Macro(RawId);
impl_arena_id!(Macro); impl_arena_id!(Macro);
@ -191,7 +184,7 @@ impl_arena_id!(Macro);
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub(super) struct MacroData { pub(super) struct MacroData {
pub(super) ast_id: FileAstId<ast::MacroCall>, pub(super) ast_id: FileAstId<ast::MacroCall>,
pub(super) path: Path, pub(super) path: ModPath,
pub(super) name: Option<Name>, pub(super) name: Option<Name>,
pub(super) export: bool, pub(super) export: bool,
pub(super) builtin: bool, pub(super) builtin: bool,
@ -208,7 +201,6 @@ pub(super) struct ImplData {
struct RawItemsCollector { struct RawItemsCollector {
raw_items: RawItems, raw_items: RawItems,
imports: Trace<LocalImportId, ImportData, ImportSourcePtr>,
source_ast_id_map: Arc<AstIdMap>, source_ast_id_map: Arc<AstIdMap>,
file_id: HirFileId, file_id: HirFileId,
hygiene: Hygiene, hygiene: Hygiene,
@ -312,10 +304,10 @@ impl RawItemsCollector {
let attrs = self.parse_attrs(&use_item); let attrs = self.parse_attrs(&use_item);
let mut buf = Vec::new(); let mut buf = Vec::new();
Path::expand_use_item( ModPath::expand_use_item(
Source { value: use_item, file_id: self.file_id }, InFile { value: use_item, file_id: self.file_id },
&self.hygiene, &self.hygiene,
|path, use_tree, is_glob, alias| { |path, _use_tree, is_glob, alias| {
let import_data = ImportData { let import_data = ImportData {
path, path,
alias, alias,
@ -324,11 +316,11 @@ impl RawItemsCollector {
is_extern_crate: false, is_extern_crate: false,
is_macro_use: false, is_macro_use: false,
}; };
buf.push((import_data, Either::A(AstPtr::new(use_tree)))); buf.push(import_data);
}, },
); );
for (import_data, ptr) in buf { for import_data in buf {
self.push_import(current_module, attrs.clone(), import_data, ptr); self.push_import(current_module, attrs.clone(), import_data);
} }
} }
@ -338,7 +330,7 @@ impl RawItemsCollector {
extern_crate: ast::ExternCrateItem, extern_crate: ast::ExternCrateItem,
) { ) {
if let Some(name_ref) = extern_crate.name_ref() { if let Some(name_ref) = extern_crate.name_ref() {
let path = Path::from_name_ref(&name_ref); let path = ModPath::from_name_ref(&name_ref);
let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name()); let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name());
let attrs = self.parse_attrs(&extern_crate); let attrs = self.parse_attrs(&extern_crate);
// FIXME: cfg_attr // FIXME: cfg_attr
@ -351,18 +343,13 @@ impl RawItemsCollector {
is_extern_crate: true, is_extern_crate: true,
is_macro_use, is_macro_use,
}; };
self.push_import( self.push_import(current_module, attrs, import_data);
current_module,
attrs,
import_data,
Either::B(AstPtr::new(&extern_crate)),
);
} }
} }
fn add_macro(&mut self, current_module: Option<Module>, m: ast::MacroCall) { fn add_macro(&mut self, current_module: Option<Module>, m: ast::MacroCall) {
let attrs = self.parse_attrs(&m); let attrs = self.parse_attrs(&m);
let path = match m.path().and_then(|path| Path::from_src(path, &self.hygiene)) { let path = match m.path().and_then(|path| ModPath::from_src(path, &self.hygiene)) {
Some(it) => it, Some(it) => it,
_ => return, _ => return,
}; };
@ -387,14 +374,8 @@ impl RawItemsCollector {
self.push_item(current_module, attrs, RawItemKind::Impl(imp)) self.push_item(current_module, attrs, RawItemKind::Impl(imp))
} }
fn push_import( fn push_import(&mut self, current_module: Option<Module>, attrs: Attrs, data: ImportData) {
&mut self, let import = self.raw_items.imports.alloc(data);
current_module: Option<Module>,
attrs: Attrs,
data: ImportData,
source: ImportSourcePtr,
) {
let import = self.imports.alloc(|| source, || data);
self.push_item(current_module, attrs, RawItemKind::Import(import)) self.push_item(current_module, attrs, RawItemKind::Import(import))
} }

View file

@ -32,27 +32,22 @@ fn render_crate_def_map(map: &CrateDefMap) -> String {
*buf += path; *buf += path;
*buf += "\n"; *buf += "\n";
let mut entries = map.modules[module] let mut entries = map.modules[module].scope.collect_resolutions();
.scope entries.sort_by_key(|(name, _)| name.clone());
.items
.iter()
.map(|(name, res)| (name, res.def))
.collect::<Vec<_>>();
entries.sort_by_key(|(name, _)| *name);
for (name, res) in entries { for (name, def) in entries {
*buf += &format!("{}:", name); *buf += &format!("{}:", name);
if res.types.is_some() { if def.types.is_some() {
*buf += " t"; *buf += " t";
} }
if res.values.is_some() { if def.values.is_some() {
*buf += " v"; *buf += " v";
} }
if res.macros.is_some() { if def.macros.is_some() {
*buf += " m"; *buf += " m";
} }
if res.is_none() { if def.is_none() {
*buf += " _"; *buf += " _";
} }
@ -558,3 +553,35 @@ fn cfg_test() {
Foo: t v Foo: t v
"###); "###);
} }
#[test]
fn infer_multiple_namespace() {
let map = def_map(
r#"
//- /main.rs
mod a {
pub type T = ();
pub use crate::b::*;
}
use crate::a::T;
mod b {
pub const T: () = ();
}
"#,
);
assert_snapshot!(map, @r###"
crate
T: t v
a: t
b: t
crate::b
T: v
crate::a
T: t v
"###);
}

View file

@ -112,3 +112,24 @@ fn glob_enum() {
"### "###
); );
} }
#[test]
fn glob_enum_group() {
covers!(glob_enum_group);
let map = def_map(
"
//- /lib.rs
enum Foo {
Bar, Baz
}
use self::Foo::{*};
",
);
assert_snapshot!(map, @r###"
crate
Bar: t v
Baz: t v
Foo: t
"###
);
}

View file

@ -116,7 +116,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
let events = db.log_executed(|| { let events = db.log_executed(|| {
let crate_def_map = db.crate_def_map(krate); let crate_def_map = db.crate_def_map(krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.items.len(), 1); assert_eq!(module_data.scope.collect_resolutions().len(), 1);
}); });
assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
} }
@ -126,7 +126,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
let events = db.log_executed(|| { let events = db.log_executed(|| {
let crate_def_map = db.crate_def_map(krate); let crate_def_map = db.crate_def_map(krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.items.len(), 1); assert_eq!(module_data.scope.collect_resolutions().len(), 1);
}); });
assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
} }

View file

@ -600,3 +600,27 @@ fn macro_dollar_crate_is_correct_in_indirect_deps() {
bar: t v bar: t v
"###); "###);
} }
#[test]
fn expand_derive() {
let map = compute_crate_def_map(
"
//- /main.rs
#[derive(Clone)]
struct Foo;
",
);
assert_eq!(map.modules[map.root].scope.impls().len(), 1);
}
#[test]
fn expand_multiple_derive() {
let map = compute_crate_def_map(
"
//- /main.rs
#[derive(Copy, Clone)]
struct Foo;
",
);
assert_eq!(map.modules[map.root].scope.impls().len(), 2);
}

View file

@ -668,7 +668,7 @@ fn unresolved_module_diagnostics() {
module: LocalModuleId( module: LocalModuleId(
0, 0,
), ),
declaration: AstId { declaration: InFile {
file_id: HirFileId( file_id: HirFileId(
FileId( FileId(
FileId( FileId(
@ -676,7 +676,7 @@ fn unresolved_module_diagnostics() {
), ),
), ),
), ),
file_ast_id: FileAstId { value: FileAstId {
raw: ErasedFileAstId( raw: ErasedFileAstId(
1, 1,
), ),

View file

@ -1,35 +1,97 @@
//! A desugared representation of paths like `crate::foo` or `<Type as Trait>::bar`. //! A desugared representation of paths like `crate::foo` or `<Type as Trait>::bar`.
mod lower;
use std::{iter, sync::Arc}; use std::{iter, sync::Arc};
use hir_expand::{ use hir_expand::{
either::Either,
hygiene::Hygiene, hygiene::Hygiene,
name::{self, AsName, Name}, name::{AsName, Name},
}; };
use ra_db::CrateId; use ra_db::CrateId;
use ra_syntax::{ use ra_syntax::ast;
ast::{self, NameOwner, TypeAscriptionOwner},
AstNode,
};
use crate::{type_ref::TypeRef, Source}; use crate::{type_ref::TypeRef, InFile};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ModPath {
pub kind: PathKind,
pub segments: Vec<Name>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum PathKind {
Plain,
/// `self::` is `Super(0)`
Super(u8),
Crate,
/// Absolute path (::foo)
Abs,
/// `$crate` from macro expansion
DollarCrate(CrateId),
}
impl ModPath {
pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> {
lower::lower_path(path, hygiene).map(|it| it.mod_path)
}
pub fn from_simple_segments(
kind: PathKind,
segments: impl IntoIterator<Item = Name>,
) -> ModPath {
let segments = segments.into_iter().collect::<Vec<_>>();
ModPath { kind, segments }
}
pub(crate) fn from_name_ref(name_ref: &ast::NameRef) -> ModPath {
name_ref.as_name().into()
}
/// Converts an `tt::Ident` into a single-identifier `Path`.
pub(crate) fn from_tt_ident(ident: &tt::Ident) -> ModPath {
ident.as_name().into()
}
/// Calls `cb` with all paths, represented by this use item.
pub(crate) fn expand_use_item(
item_src: InFile<ast::UseItem>,
hygiene: &Hygiene,
mut cb: impl FnMut(ModPath, &ast::UseTree, /* is_glob */ bool, Option<Name>),
) {
if let Some(tree) = item_src.value.use_tree() {
lower::lower_use_tree(None, tree, hygiene, &mut cb);
}
}
pub fn is_ident(&self) -> bool {
self.kind == PathKind::Plain && self.segments.len() == 1
}
pub fn is_self(&self) -> bool {
self.kind == PathKind::Super(0) && self.segments.is_empty()
}
/// If this path is a single identifier, like `foo`, return its name.
pub fn as_ident(&self) -> Option<&Name> {
if self.kind != PathKind::Plain || self.segments.len() > 1 {
return None;
}
self.segments.first()
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Path { pub struct Path {
pub kind: PathKind, /// Type based path like `<T>::foo`.
pub segments: Vec<PathSegment>, /// Note that paths like `<Type as Trait>::foo` are desugard to `Trait::<Self=Type>::foo`.
} type_anchor: Option<Box<TypeRef>>,
mod_path: ModPath,
#[derive(Debug, Clone, PartialEq, Eq, Hash)] /// Invariant: the same len as self.path.segments
pub struct PathSegment { generic_args: Vec<Option<Arc<GenericArgs>>>,
pub name: Name,
pub args_and_bindings: Option<Arc<GenericArgs>>,
} }
/// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This /// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
/// can (in the future) also include bindings of associated types, like in /// also includes bindings of associated types, like in `Iterator<Item = Foo>`.
/// `Iterator<Item = Foo>`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericArgs { pub struct GenericArgs {
pub args: Vec<GenericArg>, pub args: Vec<GenericArg>,
@ -50,234 +112,111 @@ pub enum GenericArg {
// or lifetime... // or lifetime...
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum PathKind {
Plain,
Self_,
Super,
Crate,
// Absolute path
Abs,
// Type based path like `<T>::foo`
Type(Box<TypeRef>),
// `$crate` from macro expansion
DollarCrate(CrateId),
}
impl Path { impl Path {
/// Calls `cb` with all paths, represented by this use item.
pub(crate) fn expand_use_item(
item_src: Source<ast::UseItem>,
hygiene: &Hygiene,
mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>),
) {
if let Some(tree) = item_src.value.use_tree() {
expand_use_tree(None, tree, hygiene, &mut cb);
}
}
pub(crate) fn from_simple_segments(
kind: PathKind,
segments: impl IntoIterator<Item = Name>,
) -> Path {
Path {
kind,
segments: segments
.into_iter()
.map(|name| PathSegment { name, args_and_bindings: None })
.collect(),
}
}
/// Converts an `ast::Path` to `Path`. Works with use trees. /// Converts an `ast::Path` to `Path`. Works with use trees.
/// DEPRECATED: It does not handle `$crate` from macro call. /// DEPRECATED: It does not handle `$crate` from macro call.
pub fn from_ast(path: ast::Path) -> Option<Path> { pub fn from_ast(path: ast::Path) -> Option<Path> {
Path::from_src(path, &Hygiene::new_unhygienic()) lower::lower_path(path, &Hygiene::new_unhygienic())
} }
/// Converts an `ast::Path` to `Path`. Works with use trees. /// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call. /// It correctly handles `$crate` based path from macro call.
pub fn from_src(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path> { pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<Path> {
let mut kind = PathKind::Plain; lower::lower_path(path, hygiene)
let mut segments = Vec::new();
loop {
let segment = path.segment()?;
if segment.has_colon_colon() {
kind = PathKind::Abs;
}
match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
// FIXME: this should just return name
match hygiene.name_ref_to_name(name_ref) {
Either::A(name) => {
let args = segment
.type_arg_list()
.and_then(GenericArgs::from_ast)
.or_else(|| {
GenericArgs::from_fn_like_path_ast(
segment.param_list(),
segment.ret_type(),
)
})
.map(Arc::new);
let segment = PathSegment { name, args_and_bindings: args };
segments.push(segment);
}
Either::B(crate_id) => {
kind = PathKind::DollarCrate(crate_id);
break;
}
}
}
ast::PathSegmentKind::Type { type_ref, trait_ref } => {
assert!(path.qualifier().is_none()); // this can only occur at the first segment
let self_type = TypeRef::from_ast(type_ref?);
match trait_ref {
// <T>::foo
None => {
kind = PathKind::Type(Box::new(self_type));
}
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
let path = Path::from_src(trait_ref.path()?, hygiene)?;
kind = path.kind;
let mut prefix_segments = path.segments;
prefix_segments.reverse();
segments.extend(prefix_segments);
// Insert the type reference (T in the above example) as Self parameter for the trait
let mut last_segment = segments.last_mut()?;
if last_segment.args_and_bindings.is_none() {
last_segment.args_and_bindings =
Some(Arc::new(GenericArgs::empty()));
};
let args = last_segment.args_and_bindings.as_mut().unwrap();
let mut args_inner = Arc::make_mut(args);
args_inner.has_self_type = true;
args_inner.args.insert(0, GenericArg::Type(self_type));
}
}
}
ast::PathSegmentKind::CrateKw => {
kind = PathKind::Crate;
break;
}
ast::PathSegmentKind::SelfKw => {
kind = PathKind::Self_;
break;
}
ast::PathSegmentKind::SuperKw => {
kind = PathKind::Super;
break;
}
}
path = match qualifier(&path) {
Some(it) => it,
None => break,
};
}
segments.reverse();
return Some(Path { kind, segments });
fn qualifier(path: &ast::Path) -> Option<ast::Path> {
if let Some(q) = path.qualifier() {
return Some(q);
}
// FIXME: this bottom up traversal is not too precise.
// Should we handle do a top-down analysis, recording results?
let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
let use_tree = use_tree_list.parent_use_tree();
use_tree.path()
}
} }
/// Converts an `ast::NameRef` into a single-identifier `Path`. /// Converts an `ast::NameRef` into a single-identifier `Path`.
pub(crate) fn from_name_ref(name_ref: &ast::NameRef) -> Path { pub(crate) fn from_name_ref(name_ref: &ast::NameRef) -> Path {
name_ref.as_name().into() Path { type_anchor: None, mod_path: name_ref.as_name().into(), generic_args: vec![None] }
} }
/// `true` is this path is a single identifier, like `foo` pub fn kind(&self) -> &PathKind {
pub fn is_ident(&self) -> bool { &self.mod_path.kind
self.kind == PathKind::Plain && self.segments.len() == 1
} }
/// `true` if this path is just a standalone `self` pub fn type_anchor(&self) -> Option<&TypeRef> {
pub fn is_self(&self) -> bool { self.type_anchor.as_deref()
self.kind == PathKind::Self_ && self.segments.is_empty()
} }
/// If this path is a single identifier, like `foo`, return its name. pub fn segments(&self) -> PathSegments<'_> {
pub fn as_ident(&self) -> Option<&Name> { PathSegments {
if self.kind != PathKind::Plain || self.segments.len() > 1 { segments: self.mod_path.segments.as_slice(),
generic_args: self.generic_args.as_slice(),
}
}
pub fn mod_path(&self) -> &ModPath {
&self.mod_path
}
pub fn qualifier(&self) -> Option<Path> {
if self.mod_path.is_ident() {
return None; return None;
} }
self.segments.first().map(|s| &s.name) let res = Path {
type_anchor: self.type_anchor.clone(),
mod_path: ModPath {
kind: self.mod_path.kind.clone(),
segments: self.mod_path.segments[..self.mod_path.segments.len() - 1].to_vec(),
},
generic_args: self.generic_args[..self.generic_args.len() - 1].to_vec(),
};
Some(res)
} }
}
pub fn expand_macro_expr(&self) -> Option<Name> { #[derive(Debug, Clone, PartialEq, Eq, Hash)]
self.as_ident().and_then(|name| Some(name.clone())) pub struct PathSegment<'a> {
pub name: &'a Name,
pub args_and_bindings: Option<&'a GenericArgs>,
}
pub struct PathSegments<'a> {
segments: &'a [Name],
generic_args: &'a [Option<Arc<GenericArgs>>],
}
impl<'a> PathSegments<'a> {
pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: &[] };
pub fn is_empty(&self) -> bool {
self.len() == 0
} }
pub fn len(&self) -> usize {
pub fn is_type_relative(&self) -> bool { self.segments.len()
match self.kind { }
PathKind::Type(_) => true, pub fn first(&self) -> Option<PathSegment<'a>> {
_ => false, self.get(0)
} }
pub fn last(&self) -> Option<PathSegment<'a>> {
self.get(self.len().checked_sub(1)?)
}
pub fn get(&self, idx: usize) -> Option<PathSegment<'a>> {
assert_eq!(self.segments.len(), self.generic_args.len());
let res = PathSegment {
name: self.segments.get(idx)?,
args_and_bindings: self.generic_args.get(idx).unwrap().as_ref().map(|it| &**it),
};
Some(res)
}
pub fn skip(&self, len: usize) -> PathSegments<'a> {
assert_eq!(self.segments.len(), self.generic_args.len());
PathSegments { segments: &self.segments[len..], generic_args: &self.generic_args[len..] }
}
pub fn take(&self, len: usize) -> PathSegments<'a> {
assert_eq!(self.segments.len(), self.generic_args.len());
PathSegments { segments: &self.segments[..len], generic_args: &self.generic_args[..len] }
}
pub fn iter(&self) -> impl Iterator<Item = PathSegment<'a>> {
self.segments.iter().zip(self.generic_args.iter()).map(|(name, args)| PathSegment {
name,
args_and_bindings: args.as_ref().map(|it| &**it),
})
} }
} }
impl GenericArgs { impl GenericArgs {
pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> { pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> {
let mut args = Vec::new(); lower::lower_generic_args(node)
for type_arg in node.type_args() {
let type_ref = TypeRef::from_ast_opt(type_arg.type_ref());
args.push(GenericArg::Type(type_ref));
}
// lifetimes ignored for now
let mut bindings = Vec::new();
for assoc_type_arg in node.assoc_type_args() {
if let Some(name_ref) = assoc_type_arg.name_ref() {
let name = name_ref.as_name();
let type_ref = TypeRef::from_ast_opt(assoc_type_arg.type_ref());
bindings.push((name, type_ref));
}
}
if args.is_empty() && bindings.is_empty() {
None
} else {
Some(GenericArgs { args, has_self_type: false, bindings })
}
}
/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)
/// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`).
pub(crate) fn from_fn_like_path_ast(
params: Option<ast::ParamList>,
ret_type: Option<ast::RetType>,
) -> Option<GenericArgs> {
let mut args = Vec::new();
let mut bindings = Vec::new();
if let Some(params) = params {
let mut param_types = Vec::new();
for param in params.params() {
let type_ref = TypeRef::from_ast_opt(param.ascribed_type());
param_types.push(type_ref);
}
let arg = GenericArg::Type(TypeRef::Tuple(param_types));
args.push(arg);
}
if let Some(ret_type) = ret_type {
let type_ref = TypeRef::from_ast_opt(ret_type.type_ref());
bindings.push((name::OUTPUT_TYPE, type_ref))
}
if args.is_empty() && bindings.is_empty() {
None
} else {
Some(GenericArgs { args, has_self_type: false, bindings })
}
} }
pub(crate) fn empty() -> GenericArgs { pub(crate) fn empty() -> GenericArgs {
@ -287,137 +226,51 @@ impl GenericArgs {
impl From<Name> for Path { impl From<Name> for Path {
fn from(name: Name) -> Path { fn from(name: Name) -> Path {
Path::from_simple_segments(PathKind::Plain, iter::once(name)) Path {
} type_anchor: None,
} mod_path: ModPath::from_simple_segments(PathKind::Plain, iter::once(name)),
generic_args: vec![None],
fn expand_use_tree(
prefix: Option<Path>,
tree: ast::UseTree,
hygiene: &Hygiene,
cb: &mut dyn FnMut(Path, &ast::UseTree, bool, Option<Name>),
) {
if let Some(use_tree_list) = tree.use_tree_list() {
let prefix = match tree.path() {
// E.g. use something::{{{inner}}};
None => prefix,
// E.g. `use something::{inner}` (prefix is `None`, path is `something`)
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
Some(path) => match convert_path(prefix, path, hygiene) {
Some(it) => Some(it),
None => return, // FIXME: report errors somewhere
},
};
for child_tree in use_tree_list.use_trees() {
expand_use_tree(prefix.clone(), child_tree, hygiene, cb);
}
} else {
let alias = tree.alias().and_then(|a| a.name()).map(|a| a.as_name());
if let Some(ast_path) = tree.path() {
// Handle self in a path.
// E.g. `use something::{self, <...>}`
if ast_path.qualifier().is_none() {
if let Some(segment) = ast_path.segment() {
if segment.kind() == Some(ast::PathSegmentKind::SelfKw) {
if let Some(prefix) = prefix {
cb(prefix, &tree, false, alias);
return;
}
}
}
}
if let Some(path) = convert_path(prefix, ast_path, hygiene) {
let is_glob = tree.has_star();
cb(path, &tree, is_glob, alias)
}
// FIXME: report errors somewhere
// We get here if we do
} }
} }
} }
fn convert_path(prefix: Option<Path>, path: ast::Path, hygiene: &Hygiene) -> Option<Path> { impl From<Name> for ModPath {
let prefix = if let Some(qual) = path.qualifier() { fn from(name: Name) -> ModPath {
Some(convert_path(prefix, qual, hygiene)?) ModPath::from_simple_segments(PathKind::Plain, iter::once(name))
} else { }
prefix }
pub use hir_expand::name as __name;
#[macro_export]
macro_rules! __known_path {
(std::iter::IntoIterator) => {};
(std::result::Result) => {};
(std::ops::Range) => {};
(std::ops::RangeFrom) => {};
(std::ops::RangeFull) => {};
(std::ops::RangeTo) => {};
(std::ops::RangeToInclusive) => {};
(std::ops::RangeInclusive) => {};
(std::boxed::Box) => {};
(std::future::Future) => {};
(std::ops::Try) => {};
(std::ops::Neg) => {};
(std::ops::Not) => {};
(std::ops::Index) => {};
($path:path) => {
compile_error!("Please register your known path in the path module")
}; };
let segment = path.segment()?;
let res = match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
match hygiene.name_ref_to_name(name_ref) {
Either::A(name) => {
// no type args in use
let mut res = prefix.unwrap_or_else(|| Path {
kind: PathKind::Plain,
segments: Vec::with_capacity(1),
});
res.segments.push(PathSegment {
name,
args_and_bindings: None, // no type args in use
});
res
}
Either::B(crate_id) => {
return Some(Path::from_simple_segments(
PathKind::DollarCrate(crate_id),
iter::empty(),
))
}
}
}
ast::PathSegmentKind::CrateKw => {
if prefix.is_some() {
return None;
}
Path::from_simple_segments(PathKind::Crate, iter::empty())
}
ast::PathSegmentKind::SelfKw => {
if prefix.is_some() {
return None;
}
Path::from_simple_segments(PathKind::Self_, iter::empty())
}
ast::PathSegmentKind::SuperKw => {
if prefix.is_some() {
return None;
}
Path::from_simple_segments(PathKind::Super, iter::empty())
}
ast::PathSegmentKind::Type { .. } => {
// not allowed in imports
return None;
}
};
Some(res)
} }
pub mod known { #[macro_export]
use hir_expand::name; macro_rules! __path {
($start:ident $(:: $seg:ident)*) => ({
use super::{Path, PathKind}; $crate::__known_path!($start $(:: $seg)*);
$crate::path::ModPath::from_simple_segments($crate::path::PathKind::Abs, vec![
pub fn std_iter_into_iterator() -> Path { $crate::path::__name![$start], $($crate::path::__name![$seg],)*
Path::from_simple_segments( ])
PathKind::Abs, });
vec![name::STD, name::ITER, name::INTO_ITERATOR_TYPE],
)
}
pub fn std_ops_try() -> Path {
Path::from_simple_segments(PathKind::Abs, vec![name::STD, name::OPS, name::TRY_TYPE])
}
pub fn std_result_result() -> Path {
Path::from_simple_segments(PathKind::Abs, vec![name::STD, name::RESULT, name::RESULT_TYPE])
}
pub fn std_future_future() -> Path {
Path::from_simple_segments(PathKind::Abs, vec![name::STD, name::FUTURE, name::FUTURE_TYPE])
}
pub fn std_boxed_box() -> Path {
Path::from_simple_segments(PathKind::Abs, vec![name::STD, name::BOXED, name::BOX_TYPE])
}
} }
pub use crate::__path as path;

View file

@ -0,0 +1,178 @@
//! Transforms syntax into `Path` objects, ideally with accounting for hygiene
mod lower_use;
use std::sync::Arc;
use either::Either;
use hir_expand::{
hygiene::Hygiene,
name::{name, AsName},
};
use ra_syntax::ast::{self, AstNode, TypeAscriptionOwner};
use crate::{
path::{GenericArg, GenericArgs, ModPath, Path, PathKind},
type_ref::TypeRef,
};
pub(super) use lower_use::lower_use_tree;
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
///
/// The syntax tree stores a path "inside out": the outermost `ast::Path` node
/// is the *last* segment, and `qualifier()` walks towards the first. This
/// function therefore collects segments back-to-front and reverses the
/// accumulated vectors at the end. Returns `None` if any segment is missing
/// or otherwise malformed.
pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path> {
    let mut kind = PathKind::Plain;
    let mut type_anchor = None;
    // `segments` and `generic_args` are kept index-aligned: entry i of
    // `generic_args` belongs to entry i of `segments` (None = no args).
    // Both are built in reverse order; see the `reverse()` calls below.
    let mut segments = Vec::new();
    let mut generic_args = Vec::new();
    loop {
        let segment = path.segment()?;

        // A leading `::` (only possible on the first/outermost-qualifier
        // segment) makes the whole path absolute.
        if segment.has_colon_colon() {
            kind = PathKind::Abs;
        }

        match segment.kind()? {
            ast::PathSegmentKind::Name(name_ref) => {
                // FIXME: this should just return name
                match hygiene.name_ref_to_name(name_ref) {
                    Either::Left(name) => {
                        // Angle-bracket args (`Foo<T>`) take precedence;
                        // otherwise try the fn-sugar form `Fn(X) -> Y`.
                        let args = segment
                            .type_arg_list()
                            .and_then(lower_generic_args)
                            .or_else(|| {
                                lower_generic_args_from_fn_path(
                                    segment.param_list(),
                                    segment.ret_type(),
                                )
                            })
                            .map(Arc::new);
                        segments.push(name);
                        generic_args.push(args)
                    }
                    Either::Right(crate_id) => {
                        // `$crate` from a macro expansion: remember the
                        // originating crate and stop — nothing can qualify it.
                        kind = PathKind::DollarCrate(crate_id);
                        break;
                    }
                }
            }
            ast::PathSegmentKind::Type { type_ref, trait_ref } => {
                assert!(path.qualifier().is_none()); // this can only occur at the first segment
                let self_type = TypeRef::from_ast(type_ref?);
                match trait_ref {
                    // <T>::foo
                    None => {
                        type_anchor = Some(Box::new(self_type));
                        kind = PathKind::Plain;
                    }
                    // <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
                    Some(trait_ref) => {
                        let path = Path::from_src(trait_ref.path()?, hygiene)?;
                        kind = path.mod_path.kind;

                        // The trait path's segments become a prefix of the
                        // result. We are accumulating in reverse here, so the
                        // prefix must be reversed before extending.
                        let mut prefix_segments = path.mod_path.segments;
                        prefix_segments.reverse();
                        segments.extend(prefix_segments);

                        let mut prefix_args = path.generic_args;
                        prefix_args.reverse();
                        generic_args.extend(prefix_args);

                        // Insert the type reference (T in the above example) as Self parameter for the trait
                        // NOTE: because the vectors are reversed, `last_mut()`
                        // is the trait's *first* segment in source order.
                        let last_segment = generic_args.last_mut()?;
                        if last_segment.is_none() {
                            *last_segment = Some(Arc::new(GenericArgs::empty()));
                        };
                        let args = last_segment.as_mut().unwrap();
                        let mut args_inner = Arc::make_mut(args);
                        args_inner.has_self_type = true;
                        args_inner.args.insert(0, GenericArg::Type(self_type));
                    }
                }
            }
            ast::PathSegmentKind::CrateKw => {
                kind = PathKind::Crate;
                break;
            }
            ast::PathSegmentKind::SelfKw => {
                // `self` is modelled as "zero levels up" — see Super(1) below.
                kind = PathKind::Super(0);
                break;
            }
            ast::PathSegmentKind::SuperKw => {
                kind = PathKind::Super(1);
                break;
            }
        }
        // Step outwards to the next qualifier, or finish if there is none.
        path = match qualifier(&path) {
            Some(it) => it,
            None => break,
        };
    }
    // Restore source order (we accumulated back-to-front).
    segments.reverse();
    generic_args.reverse();
    let mod_path = ModPath { kind, segments };
    return Some(Path { type_anchor, mod_path, generic_args });

    // Finds the path one level "outwards": the syntactic qualifier if present,
    // otherwise — for paths inside a use-tree group like `use a::{b::c}` —
    // the path of the enclosing use tree.
    fn qualifier(path: &ast::Path) -> Option<ast::Path> {
        if let Some(q) = path.qualifier() {
            return Some(q);
        }
        // FIXME: this bottom up traversal is not too precise.
        // Should we handle do a top-down analysis, recording results?
        let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
        let use_tree = use_tree_list.parent_use_tree();
        use_tree.path()
    }
}
pub(super) fn lower_generic_args(node: ast::TypeArgList) -> Option<GenericArgs> {
let mut args = Vec::new();
for type_arg in node.type_args() {
let type_ref = TypeRef::from_ast_opt(type_arg.type_ref());
args.push(GenericArg::Type(type_ref));
}
// lifetimes ignored for now
let mut bindings = Vec::new();
for assoc_type_arg in node.assoc_type_args() {
if let Some(name_ref) = assoc_type_arg.name_ref() {
let name = name_ref.as_name();
let type_ref = TypeRef::from_ast_opt(assoc_type_arg.type_ref());
bindings.push((name, type_ref));
}
}
if args.is_empty() && bindings.is_empty() {
None
} else {
Some(GenericArgs { args, has_self_type: false, bindings })
}
}
/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)
/// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`).
///
/// Returns `None` when neither a parameter list nor a return type is present.
fn lower_generic_args_from_fn_path(
    params: Option<ast::ParamList>,
    ret_type: Option<ast::RetType>,
) -> Option<GenericArgs> {
    let mut args = Vec::new();
    let mut bindings = Vec::new();

    // The parenthesized parameter list desugars to a single tuple type
    // argument: `(X, Y)` in `Fn<(X, Y), ...>`.
    if let Some(params) = params {
        let param_types: Vec<_> =
            params.params().map(|param| TypeRef::from_ast_opt(param.ascribed_type())).collect();
        args.push(GenericArg::Type(TypeRef::Tuple(param_types)));
    }

    // The `-> Z` part desugars to an `Output = Z` associated type binding.
    if let Some(ret_type) = ret_type {
        bindings.push((name![Output], TypeRef::from_ast_opt(ret_type.type_ref())))
    }

    match (args.is_empty(), bindings.is_empty()) {
        (true, true) => None,
        _ => Some(GenericArgs { args, has_self_type: false, bindings }),
    }
}

View file

@ -0,0 +1,118 @@
//! Lowers a single complex use like `use foo::{bar, baz};` into a list of paths like
//! `foo::bar`, `foo::baz`;
use std::iter;
use either::Either;
use hir_expand::{
hygiene::Hygiene,
name::{AsName, Name},
};
use ra_syntax::ast::{self, NameOwner};
use test_utils::tested_by;
use crate::path::{ModPath, PathKind};
/// Recursively flattens one use tree, invoking `cb` once per leaf with the
/// full `ModPath`, the leaf's syntax node, whether it is a glob import, and
/// its alias (if any). `prefix` is the already-lowered path leading up to
/// this subtree.
pub(crate) fn lower_use_tree(
    prefix: Option<ModPath>,
    tree: ast::UseTree,
    hygiene: &Hygiene,
    cb: &mut dyn FnMut(ModPath, &ast::UseTree, bool, Option<Name>),
) {
    // Braced group: extend the prefix by the path before the braces (if any)
    // and recurse into each child tree.
    if let Some(use_tree_list) = tree.use_tree_list() {
        let prefix = match tree.path() {
            // E.g. use something::{{{inner}}};
            None => prefix,
            // E.g. `use something::{inner}` (prefix is `None`, path is `something`)
            // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
            Some(path) => match convert_path(prefix, path, hygiene) {
                Some(it) => Some(it),
                None => return, // FIXME: report errors somewhere
            },
        };
        for child in use_tree_list.use_trees() {
            lower_use_tree(prefix.clone(), child, hygiene, cb);
        }
        return;
    }

    // Leaf: a plain path, possibly with an alias and/or a trailing `*`.
    let alias = tree.alias().and_then(|it| it.name()).map(|it| it.as_name());
    let is_glob = tree.has_star();
    match tree.path() {
        Some(ast_path) => {
            // A bare `self` segment refers to the prefix itself,
            // e.g. `use something::{self, <...>}`.
            if ast_path.qualifier().is_none() {
                let is_self_kw = ast_path
                    .segment()
                    .map_or(false, |it| it.kind() == Some(ast::PathSegmentKind::SelfKw));
                if is_self_kw {
                    if let Some(prefix) = prefix {
                        cb(prefix, &tree, false, alias);
                        return;
                    }
                }
            }
            if let Some(path) = convert_path(prefix, ast_path, hygiene) {
                cb(path, &tree, is_glob, alias)
            }
            // FIXME: report errors somewhere
            // We get here if we do
        }
        // A star with no path at all, e.g. the `*` in `use Enum::{A, *};`.
        None if is_glob => {
            tested_by!(glob_enum_group);
            if let Some(prefix) = prefix {
                cb(prefix, &tree, is_glob, None)
            }
        }
        None => {}
    }
}
/// Lowers an `ast::Path` from a use tree into a `ModPath`, appending it to
/// `prefix` (the part of the use tree already lowered). Returns `None` for
/// paths that are not valid in imports.
fn convert_path(prefix: Option<ModPath>, path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> {
    // Lower the qualifier (everything before the final `::`) first, so that
    // segments accumulate left-to-right.
    let prefix = match path.qualifier() {
        Some(qual) => Some(convert_path(prefix, qual, hygiene)?),
        None => prefix,
    };

    let segment = path.segment()?;
    match segment.kind()? {
        ast::PathSegmentKind::Name(name_ref) => match hygiene.name_ref_to_name(name_ref) {
            Either::Left(name) => {
                // Ordinary identifier: push it onto whatever was lowered so
                // far, starting a fresh plain path if there is no prefix.
                let mut mod_path = prefix.unwrap_or_else(|| ModPath {
                    kind: PathKind::Plain,
                    segments: Vec::with_capacity(1),
                });
                mod_path.segments.push(name);
                Some(mod_path)
            }
            // `$crate` from a macro expansion: resolves to the defining crate.
            Either::Right(crate_id) => {
                Some(ModPath::from_simple_segments(PathKind::DollarCrate(crate_id), iter::empty()))
            }
        },
        // `crate`/`self`/`super` keywords are only valid as the first segment,
        // i.e. when no prefix has accumulated yet.
        ast::PathSegmentKind::CrateKw if prefix.is_none() => {
            Some(ModPath::from_simple_segments(PathKind::Crate, iter::empty()))
        }
        ast::PathSegmentKind::SelfKw if prefix.is_none() => {
            Some(ModPath::from_simple_segments(PathKind::Super(0), iter::empty()))
        }
        ast::PathSegmentKind::SuperKw if prefix.is_none() => {
            Some(ModPath::from_simple_segments(PathKind::Super(1), iter::empty()))
        }
        // Keyword segments after a qualifier, and `<T as Trait>` type
        // segments, are not allowed in imports.
        _ => None,
    }
}

View file

@ -11,8 +11,6 @@ use crate::ModuleDefId;
pub struct PerNs { pub struct PerNs {
pub types: Option<ModuleDefId>, pub types: Option<ModuleDefId>,
pub values: Option<ModuleDefId>, pub values: Option<ModuleDefId>,
/// Since macros has different type, many methods simply ignore it.
/// We can only use special method like `get_macros` to access it.
pub macros: Option<MacroDefId>, pub macros: Option<MacroDefId>,
} }

View file

@ -2,7 +2,7 @@
use std::sync::Arc; use std::sync::Arc;
use hir_expand::{ use hir_expand::{
name::{self, Name}, name::{name, Name},
MacroDefId, MacroDefId,
}; };
use ra_db::CrateId; use ra_db::CrateId;
@ -10,20 +10,23 @@ use rustc_hash::FxHashSet;
use crate::{ use crate::{
body::scope::{ExprScopes, ScopeId}, body::scope::{ExprScopes, ScopeId},
body::Body,
builtin_type::BuiltinType, builtin_type::BuiltinType,
db::DefDatabase, db::DefDatabase,
expr::{ExprId, PatId}, expr::{ExprId, PatId},
generics::GenericParams, generics::GenericParams,
item_scope::BuiltinShadowMode,
nameres::CrateDefMap, nameres::CrateDefMap,
path::{Path, PathKind}, path::{ModPath, PathKind},
per_ns::PerNs, per_ns::PerNs,
AdtId, AstItemDef, ConstId, ContainerId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, AdtId, AssocContainerId, ConstId, ContainerId, DefWithBodyId, EnumId, EnumVariantId,
GenericDefId, HasModule, ImplId, LocalModuleId, Lookup, ModuleDefId, ModuleId, StaticId, FunctionId, GenericDefId, HasModule, ImplId, LocalModuleId, Lookup, ModuleDefId, ModuleId,
StructId, TraitId, TypeAliasId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, VariantId,
}; };
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
pub struct Resolver { pub struct Resolver {
// FIXME: all usages generally call `.rev`, so maybe reverse once in consturciton?
scopes: Vec<Scope>, scopes: Vec<Scope>,
} }
@ -53,12 +56,14 @@ enum Scope {
AdtScope(AdtId), AdtScope(AdtId),
/// Local bindings /// Local bindings
ExprScope(ExprScope), ExprScope(ExprScope),
/// Temporary hack to support local items.
LocalItemsScope(Arc<Body>),
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TypeNs { pub enum TypeNs {
SelfType(ImplId), SelfType(ImplId),
GenericParam(u32), GenericParam(TypeParamId),
AdtId(AdtId), AdtId(AdtId),
AdtSelfType(AdtId), AdtSelfType(AdtId),
// Yup, enum variants are added to the types ns, but any usage of variant as // Yup, enum variants are added to the types ns, but any usage of variant as
@ -90,8 +95,8 @@ pub enum ValueNs {
impl Resolver { impl Resolver {
/// Resolve known trait from std, like `std::futures::Future` /// Resolve known trait from std, like `std::futures::Future`
pub fn resolve_known_trait(&self, db: &impl DefDatabase, path: &Path) -> Option<TraitId> { pub fn resolve_known_trait(&self, db: &impl DefDatabase, path: &ModPath) -> Option<TraitId> {
let res = self.resolve_module_path(db, path).take_types()?; let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
match res { match res {
ModuleDefId::TraitId(it) => Some(it), ModuleDefId::TraitId(it) => Some(it),
_ => None, _ => None,
@ -99,8 +104,8 @@ impl Resolver {
} }
/// Resolve known struct from std, like `std::boxed::Box` /// Resolve known struct from std, like `std::boxed::Box`
pub fn resolve_known_struct(&self, db: &impl DefDatabase, path: &Path) -> Option<StructId> { pub fn resolve_known_struct(&self, db: &impl DefDatabase, path: &ModPath) -> Option<StructId> {
let res = self.resolve_module_path(db, path).take_types()?; let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
match res { match res {
ModuleDefId::AdtId(AdtId::StructId(it)) => Some(it), ModuleDefId::AdtId(AdtId::StructId(it)) => Some(it),
_ => None, _ => None,
@ -108,87 +113,116 @@ impl Resolver {
} }
/// Resolve known enum from std, like `std::result::Result` /// Resolve known enum from std, like `std::result::Result`
pub fn resolve_known_enum(&self, db: &impl DefDatabase, path: &Path) -> Option<EnumId> { pub fn resolve_known_enum(&self, db: &impl DefDatabase, path: &ModPath) -> Option<EnumId> {
let res = self.resolve_module_path(db, path).take_types()?; let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
match res { match res {
ModuleDefId::AdtId(AdtId::EnumId(it)) => Some(it), ModuleDefId::AdtId(AdtId::EnumId(it)) => Some(it),
_ => None, _ => None,
} }
} }
/// pub only for source-binder fn resolve_module_path(
pub fn resolve_module_path(&self, db: &impl DefDatabase, path: &Path) -> PerNs { &self,
db: &impl DefDatabase,
path: &ModPath,
shadow: BuiltinShadowMode,
) -> PerNs {
let (item_map, module) = match self.module() { let (item_map, module) = match self.module() {
Some(it) => it, Some(it) => it,
None => return PerNs::none(), None => return PerNs::none(),
}; };
let (module_res, segment_index) = item_map.resolve_path(db, module, path); let (module_res, segment_index) = item_map.resolve_path(db, module, &path, shadow);
if segment_index.is_some() { if segment_index.is_some() {
return PerNs::none(); return PerNs::none();
} }
module_res module_res
} }
pub fn resolve_module_path_in_items(&self, db: &impl DefDatabase, path: &ModPath) -> PerNs {
self.resolve_module_path(db, path, BuiltinShadowMode::Module)
}
pub fn resolve_path_in_type_ns( pub fn resolve_path_in_type_ns(
&self, &self,
db: &impl DefDatabase, db: &impl DefDatabase,
path: &Path, path: &ModPath,
) -> Option<(TypeNs, Option<usize>)> { ) -> Option<(TypeNs, Option<usize>)> {
if path.is_type_relative() { let first_name = path.segments.first()?;
return None;
}
let first_name = &path.segments.first()?.name;
let skip_to_mod = path.kind != PathKind::Plain; let skip_to_mod = path.kind != PathKind::Plain;
for scope in self.scopes.iter().rev() { for scope in self.scopes.iter().rev() {
match scope { match scope {
Scope::ExprScope(_) => continue, Scope::ExprScope(_) => continue,
Scope::GenericParams { .. } | Scope::ImplBlockScope(_) if skip_to_mod => continue, Scope::GenericParams { .. }
| Scope::ImplBlockScope(_)
| Scope::LocalItemsScope(_)
if skip_to_mod =>
{
continue
}
Scope::GenericParams { params, .. } => { Scope::GenericParams { params, def } => {
if let Some(param) = params.find_by_name(first_name) { if let Some(local_id) = params.find_by_name(first_name) {
let idx = if path.segments.len() == 1 { None } else { Some(1) }; let idx = if path.segments.len() == 1 { None } else { Some(1) };
return Some((TypeNs::GenericParam(param.idx), idx)); return Some((
TypeNs::GenericParam(TypeParamId { local_id, parent: *def }),
idx,
));
} }
} }
Scope::ImplBlockScope(impl_) => { Scope::ImplBlockScope(impl_) => {
if first_name == &name::SELF_TYPE { if first_name == &name![Self] {
let idx = if path.segments.len() == 1 { None } else { Some(1) }; let idx = if path.segments.len() == 1 { None } else { Some(1) };
return Some((TypeNs::SelfType(*impl_), idx)); return Some((TypeNs::SelfType(*impl_), idx));
} }
} }
Scope::AdtScope(adt) => { Scope::AdtScope(adt) => {
if first_name == &name::SELF_TYPE { if first_name == &name![Self] {
let idx = if path.segments.len() == 1 { None } else { Some(1) }; let idx = if path.segments.len() == 1 { None } else { Some(1) };
return Some((TypeNs::AdtSelfType(*adt), idx)); return Some((TypeNs::AdtSelfType(*adt), idx));
} }
} }
Scope::ModuleScope(m) => { Scope::ModuleScope(m) => {
let (module_def, idx) = m.crate_def_map.resolve_path(db, m.module_id, path); let (module_def, idx) = m.crate_def_map.resolve_path(
let res = match module_def.take_types()? { db,
ModuleDefId::AdtId(it) => TypeNs::AdtId(it), m.module_id,
ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it), &path,
BuiltinShadowMode::Other,
ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it), );
ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it), let res = to_type_ns(module_def)?;
ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
ModuleDefId::FunctionId(_)
| ModuleDefId::ConstId(_)
| ModuleDefId::StaticId(_)
| ModuleDefId::ModuleId(_) => return None,
};
return Some((res, idx)); return Some((res, idx));
} }
Scope::LocalItemsScope(body) => {
let def = body.item_scope.get(first_name, BuiltinShadowMode::Other);
if let Some(res) = to_type_ns(def) {
return Some((res, None));
}
}
} }
} }
None return None;
fn to_type_ns(per_ns: PerNs) -> Option<TypeNs> {
let res = match per_ns.take_types()? {
ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it),
ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it),
ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it),
ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
ModuleDefId::FunctionId(_)
| ModuleDefId::ConstId(_)
| ModuleDefId::StaticId(_)
| ModuleDefId::ModuleId(_) => return None,
};
Some(res)
}
} }
pub fn resolve_path_in_type_ns_fully( pub fn resolve_path_in_type_ns_fully(
&self, &self,
db: &impl DefDatabase, db: &impl DefDatabase,
path: &Path, path: &ModPath,
) -> Option<TypeNs> { ) -> Option<TypeNs> {
let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?; let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?;
if unresolved.is_some() { if unresolved.is_some() {
@ -197,17 +231,14 @@ impl Resolver {
Some(res) Some(res)
} }
pub fn resolve_path_in_value_ns<'p>( pub fn resolve_path_in_value_ns(
&self, &self,
db: &impl DefDatabase, db: &impl DefDatabase,
path: &'p Path, path: &ModPath,
) -> Option<ResolveValueResult> { ) -> Option<ResolveValueResult> {
if path.is_type_relative() {
return None;
}
let n_segments = path.segments.len(); let n_segments = path.segments.len();
let tmp = name::SELF_PARAM; let tmp = name![self];
let first_name = if path.is_self() { &tmp } else { &path.segments.first()?.name }; let first_name = if path.is_self() { &tmp } else { &path.segments.first()? };
let skip_to_mod = path.kind != PathKind::Plain && !path.is_self(); let skip_to_mod = path.kind != PathKind::Plain && !path.is_self();
for scope in self.scopes.iter().rev() { for scope in self.scopes.iter().rev() {
match scope { match scope {
@ -215,6 +246,7 @@ impl Resolver {
| Scope::ExprScope(_) | Scope::ExprScope(_)
| Scope::GenericParams { .. } | Scope::GenericParams { .. }
| Scope::ImplBlockScope(_) | Scope::ImplBlockScope(_)
| Scope::LocalItemsScope(_)
if skip_to_mod => if skip_to_mod =>
{ {
continue continue
@ -233,22 +265,22 @@ impl Resolver {
} }
Scope::ExprScope(_) => continue, Scope::ExprScope(_) => continue,
Scope::GenericParams { params, .. } if n_segments > 1 => { Scope::GenericParams { params, def } if n_segments > 1 => {
if let Some(param) = params.find_by_name(first_name) { if let Some(local_id) = params.find_by_name(first_name) {
let ty = TypeNs::GenericParam(param.idx); let ty = TypeNs::GenericParam(TypeParamId { local_id, parent: *def });
return Some(ResolveValueResult::Partial(ty, 1)); return Some(ResolveValueResult::Partial(ty, 1));
} }
} }
Scope::GenericParams { .. } => continue, Scope::GenericParams { .. } => continue,
Scope::ImplBlockScope(impl_) if n_segments > 1 => { Scope::ImplBlockScope(impl_) if n_segments > 1 => {
if first_name == &name::SELF_TYPE { if first_name == &name![Self] {
let ty = TypeNs::SelfType(*impl_); let ty = TypeNs::SelfType(*impl_);
return Some(ResolveValueResult::Partial(ty, 1)); return Some(ResolveValueResult::Partial(ty, 1));
} }
} }
Scope::AdtScope(adt) if n_segments > 1 => { Scope::AdtScope(adt) if n_segments > 1 => {
if first_name == &name::SELF_TYPE { if first_name == &name![Self] {
let ty = TypeNs::AdtSelfType(*adt); let ty = TypeNs::AdtSelfType(*adt);
return Some(ResolveValueResult::Partial(ty, 1)); return Some(ResolveValueResult::Partial(ty, 1));
} }
@ -256,23 +288,15 @@ impl Resolver {
Scope::ImplBlockScope(_) | Scope::AdtScope(_) => continue, Scope::ImplBlockScope(_) | Scope::AdtScope(_) => continue,
Scope::ModuleScope(m) => { Scope::ModuleScope(m) => {
let (module_def, idx) = m.crate_def_map.resolve_path(db, m.module_id, path); let (module_def, idx) = m.crate_def_map.resolve_path(
db,
m.module_id,
&path,
BuiltinShadowMode::Other,
);
return match idx { return match idx {
None => { None => {
let value = match module_def.take_values()? { let value = to_value_ns(module_def)?;
ModuleDefId::FunctionId(it) => ValueNs::FunctionId(it),
ModuleDefId::AdtId(AdtId::StructId(it)) => ValueNs::StructId(it),
ModuleDefId::EnumVariantId(it) => ValueNs::EnumVariantId(it),
ModuleDefId::ConstId(it) => ValueNs::ConstId(it),
ModuleDefId::StaticId(it) => ValueNs::StaticId(it),
ModuleDefId::AdtId(AdtId::EnumId(_))
| ModuleDefId::AdtId(AdtId::UnionId(_))
| ModuleDefId::TraitId(_)
| ModuleDefId::TypeAliasId(_)
| ModuleDefId::BuiltinType(_)
| ModuleDefId::ModuleId(_) => return None,
};
Some(ResolveValueResult::ValueNs(value)) Some(ResolveValueResult::ValueNs(value))
} }
Some(idx) => { Some(idx) => {
@ -292,15 +316,39 @@ impl Resolver {
} }
}; };
} }
Scope::LocalItemsScope(body) => {
let def = body.item_scope.get(first_name, BuiltinShadowMode::Other);
if let Some(res) = to_value_ns(def) {
return Some(ResolveValueResult::ValueNs(res));
}
}
} }
} }
None return None;
fn to_value_ns(per_ns: PerNs) -> Option<ValueNs> {
let res = match per_ns.take_values()? {
ModuleDefId::FunctionId(it) => ValueNs::FunctionId(it),
ModuleDefId::AdtId(AdtId::StructId(it)) => ValueNs::StructId(it),
ModuleDefId::EnumVariantId(it) => ValueNs::EnumVariantId(it),
ModuleDefId::ConstId(it) => ValueNs::ConstId(it),
ModuleDefId::StaticId(it) => ValueNs::StaticId(it),
ModuleDefId::AdtId(AdtId::EnumId(_))
| ModuleDefId::AdtId(AdtId::UnionId(_))
| ModuleDefId::TraitId(_)
| ModuleDefId::TypeAliasId(_)
| ModuleDefId::BuiltinType(_)
| ModuleDefId::ModuleId(_) => return None,
};
Some(res)
}
} }
pub fn resolve_path_in_value_ns_fully( pub fn resolve_path_in_value_ns_fully(
&self, &self,
db: &impl DefDatabase, db: &impl DefDatabase,
path: &Path, path: &ModPath,
) -> Option<ValueNs> { ) -> Option<ValueNs> {
match self.resolve_path_in_value_ns(db, path)? { match self.resolve_path_in_value_ns(db, path)? {
ResolveValueResult::ValueNs(it) => Some(it), ResolveValueResult::ValueNs(it) => Some(it),
@ -308,9 +356,13 @@ impl Resolver {
} }
} }
pub fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &Path) -> Option<MacroDefId> { pub fn resolve_path_as_macro(
&self,
db: &impl DefDatabase,
path: &ModPath,
) -> Option<MacroDefId> {
let (item_map, module) = self.module()?; let (item_map, module) = self.module()?;
item_map.resolve_path(db, module, path).0.take_macros() item_map.resolve_path(db, module, &path, BuiltinShadowMode::Other).0.take_macros()
} }
pub fn process_all_names(&self, db: &impl DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) { pub fn process_all_names(&self, db: &impl DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
@ -350,6 +402,7 @@ impl Resolver {
) -> impl Iterator<Item = &'a crate::generics::WherePredicate> + 'a { ) -> impl Iterator<Item = &'a crate::generics::WherePredicate> + 'a {
self.scopes self.scopes
.iter() .iter()
.rev()
.filter_map(|scope| match scope { .filter_map(|scope| match scope {
Scope::GenericParams { params, .. } => Some(params), Scope::GenericParams { params, .. } => Some(params),
_ => None, _ => None,
@ -358,14 +411,14 @@ impl Resolver {
} }
pub fn generic_def(&self) -> Option<GenericDefId> { pub fn generic_def(&self) -> Option<GenericDefId> {
self.scopes.iter().find_map(|scope| match scope { self.scopes.iter().rev().find_map(|scope| match scope {
Scope::GenericParams { def, .. } => Some(*def), Scope::GenericParams { def, .. } => Some(*def),
_ => None, _ => None,
}) })
} }
pub fn body_owner(&self) -> Option<DefWithBodyId> { pub fn body_owner(&self) -> Option<DefWithBodyId> {
self.scopes.iter().find_map(|scope| match scope { self.scopes.iter().rev().find_map(|scope| match scope {
Scope::ExprScope(it) => Some(it.owner), Scope::ExprScope(it) => Some(it.owner),
_ => None, _ => None,
}) })
@ -376,7 +429,7 @@ pub enum ScopeDef {
PerNs(PerNs), PerNs(PerNs),
ImplSelfType(ImplId), ImplSelfType(ImplId),
AdtSelfType(AdtId), AdtSelfType(AdtId),
GenericParam(u32), GenericParam(TypeParamId),
Local(PatId), Local(PatId),
} }
@ -391,8 +444,8 @@ impl Scope {
// def: m.module.into(), // def: m.module.into(),
// }), // }),
// ); // );
m.crate_def_map[m.module_id].scope.entries().for_each(|(name, res)| { m.crate_def_map[m.module_id].scope.entries().for_each(|(name, def)| {
f(name.clone(), ScopeDef::PerNs(res.def)); f(name.clone(), ScopeDef::PerNs(def));
}); });
m.crate_def_map[m.module_id].scope.legacy_macros().for_each(|(name, macro_)| { m.crate_def_map[m.module_id].scope.legacy_macros().for_each(|(name, macro_)| {
f(name.clone(), ScopeDef::PerNs(PerNs::macros(macro_))); f(name.clone(), ScopeDef::PerNs(PerNs::macros(macro_)));
@ -402,21 +455,29 @@ impl Scope {
}); });
if let Some(prelude) = m.crate_def_map.prelude { if let Some(prelude) = m.crate_def_map.prelude {
let prelude_def_map = db.crate_def_map(prelude.krate); let prelude_def_map = db.crate_def_map(prelude.krate);
prelude_def_map[prelude.local_id].scope.entries().for_each(|(name, res)| { prelude_def_map[prelude.local_id].scope.entries().for_each(|(name, def)| {
f(name.clone(), ScopeDef::PerNs(res.def)); f(name.clone(), ScopeDef::PerNs(def));
}); });
} }
} }
Scope::GenericParams { params, .. } => { Scope::LocalItemsScope(body) => {
for param in params.params.iter() { body.item_scope.entries_without_primitives().for_each(|(name, def)| {
f(param.name.clone(), ScopeDef::GenericParam(param.idx)) f(name.clone(), ScopeDef::PerNs(def));
})
}
Scope::GenericParams { params, def } => {
for (local_id, param) in params.types.iter() {
f(
param.name.clone(),
ScopeDef::GenericParam(TypeParamId { local_id, parent: *def }),
)
} }
} }
Scope::ImplBlockScope(i) => { Scope::ImplBlockScope(i) => {
f(name::SELF_TYPE, ScopeDef::ImplSelfType((*i).into())); f(name![Self], ScopeDef::ImplSelfType((*i).into()));
} }
Scope::AdtScope(i) => { Scope::AdtScope(i) => {
f(name::SELF_TYPE, ScopeDef::AdtSelfType((*i).into())); f(name![Self], ScopeDef::AdtSelfType((*i).into()));
} }
Scope::ExprScope(scope) => { Scope::ExprScope(scope) => {
scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| { scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| {
@ -439,6 +500,7 @@ pub fn resolver_for_scope(
scope_id: Option<ScopeId>, scope_id: Option<ScopeId>,
) -> Resolver { ) -> Resolver {
let mut r = owner.resolver(db); let mut r = owner.resolver(db);
r = r.push_local_items_scope(db.body(owner));
let scopes = db.expr_scopes(owner); let scopes = db.expr_scopes(owner);
let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>(); let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
for scope in scope_chain.into_iter().rev() { for scope in scope_chain.into_iter().rev() {
@ -455,7 +517,7 @@ impl Resolver {
fn push_generic_params_scope(self, db: &impl DefDatabase, def: GenericDefId) -> Resolver { fn push_generic_params_scope(self, db: &impl DefDatabase, def: GenericDefId) -> Resolver {
let params = db.generic_params(def); let params = db.generic_params(def);
if params.params.is_empty() { if params.types.is_empty() {
self self
} else { } else {
self.push_scope(Scope::GenericParams { def, params }) self.push_scope(Scope::GenericParams { def, params })
@ -474,6 +536,10 @@ impl Resolver {
self.push_scope(Scope::ModuleScope(ModuleItemMap { crate_def_map, module_id })) self.push_scope(Scope::ModuleScope(ModuleItemMap { crate_def_map, module_id }))
} }
fn push_local_items_scope(self, body: Arc<Body>) -> Resolver {
self.push_scope(Scope::LocalItemsScope(body))
}
fn push_expr_scope( fn push_expr_scope(
self, self,
owner: DefWithBodyId, owner: DefWithBodyId,
@ -498,7 +564,7 @@ impl HasResolver for ModuleId {
impl HasResolver for TraitId { impl HasResolver for TraitId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db).resolver(db).push_generic_params_scope(db, self.into()) self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
} }
} }
@ -518,16 +584,6 @@ impl HasResolver for FunctionId {
} }
} }
impl HasResolver for DefWithBodyId {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self {
DefWithBodyId::ConstId(c) => c.resolver(db),
DefWithBodyId::FunctionId(f) => f.resolver(db),
DefWithBodyId::StaticId(s) => s.resolver(db),
}
}
}
impl HasResolver for ConstId { impl HasResolver for ConstId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.lookup(db).container.resolver(db) self.lookup(db).container.resolver(db)
@ -546,12 +602,41 @@ impl HasResolver for TypeAliasId {
} }
} }
impl HasResolver for ImplId {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.lookup(db)
.container
.resolver(db)
.push_generic_params_scope(db, self.into())
.push_impl_block_scope(self)
}
}
impl HasResolver for DefWithBodyId {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self {
DefWithBodyId::ConstId(c) => c.resolver(db),
DefWithBodyId::FunctionId(f) => f.resolver(db),
DefWithBodyId::StaticId(s) => s.resolver(db),
}
}
}
impl HasResolver for ContainerId { impl HasResolver for ContainerId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self { match self {
ContainerId::TraitId(it) => it.resolver(db),
ContainerId::ImplId(it) => it.resolver(db),
ContainerId::ModuleId(it) => it.resolver(db), ContainerId::ModuleId(it) => it.resolver(db),
ContainerId::DefWithBodyId(it) => it.resolver(db),
}
}
}
impl HasResolver for AssocContainerId {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self {
AssocContainerId::ContainerId(it) => it.resolver(db),
AssocContainerId::TraitId(it) => it.resolver(db),
AssocContainerId::ImplId(it) => it.resolver(db),
} }
} }
} }
@ -570,11 +655,12 @@ impl HasResolver for GenericDefId {
} }
} }
impl HasResolver for ImplId { impl HasResolver for VariantId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db) match self {
.resolver(db) VariantId::EnumVariantId(it) => it.parent.resolver(db),
.push_generic_params_scope(db, self.into()) VariantId::StructId(it) => it.resolver(db),
.push_impl_block_scope(self) VariantId::UnionId(it) => it.resolver(db),
}
} }
} }

View file

@ -0,0 +1,36 @@
//! Utilities for mapping between hir IDs and the surface syntax.
use hir_expand::InFile;
use ra_arena::map::ArenaMap;
use ra_syntax::AstNode;
use crate::{db::DefDatabase, AssocItemLoc, ItemLoc};
/// Items whose defining syntax node can be recovered from the database.
pub trait HasSource {
    /// The AST node type this location maps back to.
    type Value;
    /// Returns the item's syntax node together with the file it lives in.
    fn source(&self, db: &impl DefDatabase) -> InFile<Self::Value>;
}
impl<N: AstNode> HasSource for AssocItemLoc<N> {
    type Value = N;

    /// Resolves the stored `AstId` back to its syntax node and pairs it
    /// with the originating file id.
    fn source(&self, db: &impl DefDatabase) -> InFile<N> {
        InFile::new(self.ast_id.file_id, self.ast_id.to_node(db))
    }
}
impl<N: AstNode> HasSource for ItemLoc<N> {
    type Value = N;

    /// Looks up this item's syntax node via its `AstId` and wraps it with
    /// the file id it was recorded in.
    fn source(&self, db: &impl DefDatabase) -> InFile<N> {
        let syntax = self.ast_id.to_node(db);
        InFile::new(self.ast_id.file_id, syntax)
    }
}
/// Items whose *children* (e.g. fields, variants) each have their own
/// syntax node, addressed by an arena id.
pub trait HasChildSource {
    /// Arena id type identifying a single child.
    type ChildId;
    /// The syntax node type of one child.
    type Value;
    /// Returns a map from every child id to its syntax node, wrapped with
    /// the file the children were parsed from.
    fn child_source(&self, db: &impl DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>>;
}

View file

@ -18,10 +18,6 @@ pub(crate) struct Trace<ID: ArenaId, T, V> {
} }
impl<ID: ra_arena::ArenaId + Copy, T, V> Trace<ID, T, V> { impl<ID: ra_arena::ArenaId + Copy, T, V> Trace<ID, T, V> {
pub(crate) fn new() -> Trace<ID, T, V> {
Trace { arena: Some(Arena::default()), map: Some(ArenaMap::default()), len: 0 }
}
pub(crate) fn new_for_arena() -> Trace<ID, T, V> { pub(crate) fn new_for_arena() -> Trace<ID, T, V> {
Trace { arena: Some(Arena::default()), map: None, len: 0 } Trace { arena: Some(Arena::default()), map: None, len: 0 }
} }
@ -52,8 +48,4 @@ impl<ID: ra_arena::ArenaId + Copy, T, V> Trace<ID, T, V> {
pub(crate) fn into_map(mut self) -> ArenaMap<ID, V> { pub(crate) fn into_map(mut self) -> ArenaMap<ID, V> {
self.map.take().unwrap() self.map.take().unwrap()
} }
pub(crate) fn into_arena_and_map(mut self) -> (Arena<ID, T>, ArenaMap<ID, V>) {
(self.arena.take().unwrap(), self.map.take().unwrap())
}
} }

View file

@ -9,6 +9,7 @@ doctest = false
[dependencies] [dependencies]
log = "0.4.5" log = "0.4.5"
either = "1.5"
ra_arena = { path = "../ra_arena" } ra_arena = { path = "../ra_arena" }
ra_db = { path = "../ra_db" } ra_db = { path = "../ra_db" }

View file

@ -39,6 +39,16 @@ impl<N: AstNode> Hash for FileAstId<N> {
} }
} }
impl<N: AstNode> FileAstId<N> {
    // Can't make this a From implementation because of coherence
    /// Converts this id into an id of a "wider" node type `M` (one that `N`
    /// converts into). The raw erased id is reused unchanged — only the
    /// phantom type parameter changes, so this is free at runtime.
    pub fn upcast<M: AstNode>(self) -> FileAstId<M>
    where
        M: From<N>,
    {
        FileAstId { raw: self.raw, _ty: PhantomData }
    }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct ErasedFileAstId(RawId); struct ErasedFileAstId(RawId);
impl_arena_id!(ErasedFileAstId); impl_arena_id!(ErasedFileAstId);
@ -53,7 +63,7 @@ impl AstIdMap {
pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap { pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none()); assert!(node.parent().is_none());
let mut res = AstIdMap { arena: Arena::default() }; let mut res = AstIdMap { arena: Arena::default() };
// By walking the tree in bread-first order we make sure that parents // By walking the tree in breadth-first order we make sure that parents
// get lower ids then children. That is, adding a new child does not // get lower ids then children. That is, adding a new child does not
// change parent's id. This means that, say, adding a new function to a // change parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching. // trait does not change ids of top-level items, which helps caching.

View file

@ -0,0 +1,321 @@
//! Builtin derives.
use log::debug;
use ra_parser::FragmentKind;
use ra_syntax::{
ast::{self, AstNode, ModuleItemOwner, NameOwner, TypeParamsOwner},
match_ast,
};
use crate::db::AstDatabase;
use crate::{name, quote, MacroCallId, MacroDefId, MacroDefKind};
/// Generates, from a list of `Trait => expander_fn` pairs:
/// the `BuiltinDeriveExpander` enum, its `expand` dispatcher, and the
/// `find_builtin_derive` by-name lookup.
macro_rules! register_builtin {
    ( $($trait:ident => $expand:ident),* ) => {
        /// One variant per built-in derive macro known to this module.
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
        pub enum BuiltinDeriveExpander {
            $($trait),*
        }

        impl BuiltinDeriveExpander {
            /// Dispatches to the expander function registered for this derive.
            pub fn expand(
                &self,
                db: &dyn AstDatabase,
                id: MacroCallId,
                tt: &tt::Subtree,
            ) -> Result<tt::Subtree, mbe::ExpandError> {
                let expander = match *self {
                    $( BuiltinDeriveExpander::$trait => $expand, )*
                };
                expander(db, id, tt)
            }
        }

        /// Looks up a built-in derive by its name; `None` for unknown names.
        /// Built-in derives carry neither a defining crate nor an `ast_id`.
        pub fn find_builtin_derive(ident: &name::Name) -> Option<MacroDefId> {
            let kind = match ident {
                $( id if id == &name::name![$trait] => BuiltinDeriveExpander::$trait, )*
                _ => return None,
            };

            Some(MacroDefId { krate: None, ast_id: None, kind: MacroDefKind::BuiltInDerive(kind) })
        }
    };
}
// The set of derives handled natively. All of them currently share the same
// trivial expansion strategy (an empty trait impl — see `expand_simple_derive`).
register_builtin! {
    Copy => copy_expand,
    Clone => clone_expand,
    Default => default_expand,
    Debug => debug_expand,
    Hash => hash_expand,
    Ord => ord_expand,
    PartialOrd => partial_ord_expand,
    Eq => eq_expand,
    PartialEq => partial_eq_expand
}
/// The minimal facts about a struct/enum/union needed to emit a derive impl.
struct BasicAdtInfo {
    // The ADT's name token; keeps the original token id for id mapping.
    name: tt::Ident,
    // Number of *type* parameters; lifetimes are not counted (see `parse_adt`).
    type_params: usize,
}
/// Re-parses the derive input token tree as items and extracts the ADT's
/// name token and type-parameter count.
///
/// Errors if the tokens don't parse, contain no module item, the item is not
/// a struct/enum/union, or the name token cannot be mapped back.
fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
    let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, FragmentKind::Items)?; // FragmentKind::Items doesn't parse attrs?
    let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
        debug!("derive node didn't parse");
        mbe::ExpandError::UnexpectedToken
    })?;
    let item = macro_items.items().next().ok_or_else(|| {
        debug!("no module item parsed");
        mbe::ExpandError::NoMatchingRule
    })?;
    let node = item.syntax();
    // Only the three ADT kinds can carry a derive attribute.
    let (name, params) = match_ast! {
        match node {
            ast::StructDef(it) => { (it.name(), it.type_param_list()) },
            ast::EnumDef(it) => { (it.name(), it.type_param_list()) },
            ast::UnionDef(it) => { (it.name(), it.type_param_list()) },
            _ => {
                debug!("unexpected node is {:?}", node);
                return Err(mbe::ExpandError::ConversionError)
            },
        }
    };
    let name = name.ok_or_else(|| {
        debug!("parsed item has no name");
        mbe::ExpandError::NoMatchingRule
    })?;
    // Recover the name's original token id via the token map — presumably so
    // expansion-to-source token mapping keeps working; verify against callers.
    let name_token_id = token_map.token_by_range(name.syntax().text_range()).ok_or_else(|| {
        debug!("name token not found");
        mbe::ExpandError::ConversionError
    })?;
    let name_token = tt::Ident { id: name_token_id, text: name.text().clone() };
    // `type_params()` yields only type parameters, so lifetimes are ignored.
    let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
    Ok(BasicAdtInfo { name: name_token, type_params })
}
/// Builds the token stream `<T0B, T1B, …, T{n-1}B>` where `B` is the given
/// `bound` token sequence (possibly empty). Used both for the impl's
/// generic-parameter list (`bound` = `: Trait`) and for the type's generic
/// arguments (`bound` = empty).
fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
    // A standalone punctuation token with an unspecified id.
    let punct = |c: char| -> tt::TokenTree {
        tt::Leaf::Punct(tt::Punct {
            char: c,
            spacing: tt::Spacing::Alone,
            id: tt::TokenId::unspecified(),
        })
        .into()
    };
    // A synthesized identifier token.
    let ident = |text: String| -> tt::TokenTree {
        tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: text.into() }).into()
    };

    let mut tokens = Vec::<tt::TokenTree>::new();
    tokens.push(punct('<'));
    for idx in 0..n {
        if idx > 0 {
            tokens.push(punct(','));
        }
        tokens.push(ident(format!("T{}", idx)));
        tokens.extend(bound.iter().cloned());
    }
    tokens.push(punct('>'));
    tokens
}
/// Shared expansion strategy for every built-in derive: emit an *empty*
/// `impl Trait for Adt {}`, bounding each synthesized type parameter by the
/// trait. Method bodies are never generated.
fn expand_simple_derive(
    tt: &tt::Subtree,
    trait_path: tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    // Learn the annotated ADT's name and generic-parameter count.
    let adt = parse_adt(tt)?;
    let adt_name = adt.name;

    // `: <trait_path>` — the bound attached to every synthesized parameter.
    let path_tokens = trait_path.token_trees.clone();
    let bound_tokens = (quote! { : ##path_tokens }).token_trees;

    // `<T0: Trait, …>` for the impl header, `<T0, …>` for the type itself.
    let generic_decls = make_type_args(adt.type_params, bound_tokens);
    let generic_uses = make_type_args(adt.type_params, Vec::new());

    let path_tokens = trait_path.token_trees;
    Ok(quote! {
        impl ##generic_decls ##path_tokens for #adt_name ##generic_uses {}
    })
}
// The per-derive expanders below all delegate to `expand_simple_derive`,
// differing only in the trait path that is emitted.

/// `#[derive(Copy)]` → `impl std::marker::Copy for … {}`.
fn copy_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    expand_simple_derive(tt, quote! { std::marker::Copy })
}

/// `#[derive(Clone)]` → `impl std::clone::Clone for … {}`.
fn clone_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    expand_simple_derive(tt, quote! { std::clone::Clone })
}

/// `#[derive(Default)]` → `impl std::default::Default for … {}`.
fn default_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    expand_simple_derive(tt, quote! { std::default::Default })
}

/// `#[derive(Debug)]` → `impl std::fmt::Debug for … {}`.
fn debug_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    expand_simple_derive(tt, quote! { std::fmt::Debug })
}

/// `#[derive(Hash)]` → `impl std::hash::Hash for … {}`.
fn hash_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    expand_simple_derive(tt, quote! { std::hash::Hash })
}

/// `#[derive(Eq)]` → `impl std::cmp::Eq for … {}`.
fn eq_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    expand_simple_derive(tt, quote! { std::cmp::Eq })
}

/// `#[derive(PartialEq)]` → `impl std::cmp::PartialEq for … {}`.
fn partial_eq_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    expand_simple_derive(tt, quote! { std::cmp::PartialEq })
}

/// `#[derive(Ord)]` → `impl std::cmp::Ord for … {}`.
fn ord_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    expand_simple_derive(tt, quote! { std::cmp::Ord })
}

/// `#[derive(PartialOrd)]` → `impl std::cmp::PartialOrd for … {}`.
fn partial_ord_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    expand_simple_derive(tt, quote! { std::cmp::PartialOrd })
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{test_db::TestDB, AstId, MacroCallKind, MacroCallLoc};
    use ra_db::{fixture::WithFixture, SourceDatabase};

    /// Runs `expander` on the first item of fixture `s` (treated as the
    /// attribute target) and returns the expansion's text.
    fn expand_builtin_derive(s: &str, expander: BuiltinDeriveExpander) -> String {
        let (db, file_id) = TestDB::with_single_file(&s);
        let parsed = db.parse(file_id);
        let items: Vec<_> =
            parsed.syntax_node().descendants().filter_map(|it| ast::ModuleItem::cast(it)).collect();
        let ast_id_map = db.ast_id_map(file_id.into());

        // NOTE(review): the first item here is the annotated ADT itself —
        // the derive is modelled as an attr macro call on it.
        let def =
            MacroDefId { krate: None, ast_id: None, kind: MacroDefKind::BuiltInDerive(expander) };

        let loc = MacroCallLoc {
            def,
            kind: MacroCallKind::Attr(AstId::new(file_id.into(), ast_id_map.ast_id(&items[0]))),
        };

        let id = db.intern_macro(loc);
        let parsed = db.parse_or_expand(id.as_file()).unwrap();

        // FIXME text() for syntax nodes parsed from token tree looks weird
        // because there's no whitespace, see below
        parsed.text().to_string()
    }

    #[test]
    fn test_copy_expand_simple() {
        let expanded = expand_builtin_derive(
            r#"
        #[derive(Copy)]
        struct Foo;
"#,
            BuiltinDeriveExpander::Copy,
        );

        assert_eq!(expanded, "impl <>std::marker::CopyforFoo <>{}");
    }

    #[test]
    fn test_copy_expand_with_type_params() {
        let expanded = expand_builtin_derive(
            r#"
        #[derive(Copy)]
        struct Foo<A, B>;
"#,
            BuiltinDeriveExpander::Copy,
        );

        assert_eq!(
            expanded,
            "impl<T0:std::marker::Copy,T1:std::marker::Copy>std::marker::CopyforFoo<T0,T1>{}"
        );
    }

    #[test]
    fn test_copy_expand_with_lifetimes() {
        let expanded = expand_builtin_derive(
            r#"
        #[derive(Copy)]
        struct Foo<A, B, 'a, 'b>;
"#,
            BuiltinDeriveExpander::Copy,
        );

        // We currently just ignore lifetimes
        assert_eq!(
            expanded,
            "impl<T0:std::marker::Copy,T1:std::marker::Copy>std::marker::CopyforFoo<T0,T1>{}"
        );
    }

    #[test]
    fn test_clone_expand() {
        let expanded = expand_builtin_derive(
            r#"
        #[derive(Clone)]
        struct Foo<A, B>;
"#,
            BuiltinDeriveExpander::Clone,
        );

        assert_eq!(
            expanded,
            "impl<T0:std::clone::Clone,T1:std::clone::Clone>std::clone::CloneforFoo<T0,T1>{}"
        );
    }
}

View file

@ -2,8 +2,7 @@
use crate::db::AstDatabase; use crate::db::AstDatabase;
use crate::{ use crate::{
ast::{self, AstNode}, ast::{self, AstNode},
name, AstId, CrateId, HirFileId, MacroCallId, MacroDefId, MacroDefKind, MacroFileKind, name, AstId, CrateId, HirFileId, MacroCallId, MacroDefId, MacroDefKind, TextUnit,
TextUnit,
}; };
use crate::quote; use crate::quote;
@ -27,6 +26,13 @@ macro_rules! register_builtin {
}; };
expander(db, id, tt) expander(db, id, tt)
} }
fn by_name(ident: &name::Name) -> Option<BuiltinFnLikeExpander> {
match ident {
$( id if id == &name::name![$name] => Some(BuiltinFnLikeExpander::$kind), )*
_ => return None,
}
}
} }
pub fn find_builtin_macro( pub fn find_builtin_macro(
@ -34,22 +40,25 @@ macro_rules! register_builtin {
krate: CrateId, krate: CrateId,
ast_id: AstId<ast::MacroCall>, ast_id: AstId<ast::MacroCall>,
) -> Option<MacroDefId> { ) -> Option<MacroDefId> {
let kind = match ident { let kind = BuiltinFnLikeExpander::by_name(ident)?;
$( id if id == &name::$name => BuiltinFnLikeExpander::$kind, )*
_ => return None,
};
Some(MacroDefId { krate, ast_id, kind: MacroDefKind::BuiltIn(kind) }) Some(MacroDefId { krate: Some(krate), ast_id: Some(ast_id), kind: MacroDefKind::BuiltIn(kind) })
} }
}; };
} }
register_builtin! { register_builtin! {
(COLUMN_MACRO, Column) => column_expand, (column, Column) => column_expand,
(COMPILE_ERROR_MACRO, CompileError) => compile_error_expand, (compile_error, CompileError) => compile_error_expand,
(FILE_MACRO, File) => file_expand, (file, File) => file_expand,
(LINE_MACRO, Line) => line_expand, (line, Line) => line_expand,
(STRINGIFY_MACRO, Stringify) => stringify_expand (stringify, Stringify) => stringify_expand,
(format_args, FormatArgs) => format_args_expand,
(env, Env) => env_expand,
(option_env, OptionEnv) => option_env_expand,
// format_args_nl only differs in that it adds a newline in the end,
// so we use the same stub expansion for now
(format_args_nl, FormatArgsNl) => format_args_expand
} }
fn to_line_number(db: &dyn AstDatabase, file: HirFileId, pos: TextUnit) -> usize { fn to_line_number(db: &dyn AstDatabase, file: HirFileId, pos: TextUnit) -> usize {
@ -82,12 +91,11 @@ fn line_expand(
_tt: &tt::Subtree, _tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> { ) -> Result<tt::Subtree, mbe::ExpandError> {
let loc = db.lookup_intern_macro(id); let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db);
let arg = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?; let arg = loc.kind.arg(db).ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
let arg_start = arg.syntax().text_range().start(); let arg_start = arg.text_range().start();
let file = id.as_file(MacroFileKind::Expr); let file = id.as_file();
let line_num = to_line_number(db, file, arg_start); let line_num = to_line_number(db, file, arg_start);
let expanded = quote! { let expanded = quote! {
@ -103,11 +111,10 @@ fn stringify_expand(
_tt: &tt::Subtree, _tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> { ) -> Result<tt::Subtree, mbe::ExpandError> {
let loc = db.lookup_intern_macro(id); let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db);
let macro_content = { let macro_content = {
let arg = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?; let arg = loc.kind.arg(db).ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
let macro_args = arg.syntax().clone(); let macro_args = arg;
let text = macro_args.text(); let text = macro_args.text();
let without_parens = TextUnit::of_char('(')..text.len() - TextUnit::of_char(')'); let without_parens = TextUnit::of_char('(')..text.len() - TextUnit::of_char(')');
text.slice(without_parens).to_string() text.slice(without_parens).to_string()
@ -120,6 +127,28 @@ fn stringify_expand(
Ok(expanded) Ok(expanded)
} }
/// `env!` stub: always expands to the empty string literal.
fn env_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    _tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    // dummy implementation for type-checking purposes
    let expanded = quote! { "" };
    Ok(expanded)
}

/// `option_env!` stub: always expands to `None::<&str>`.
fn option_env_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    _tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    // dummy implementation for type-checking purposes
    let expanded = quote! { std::option::Option::None::<&str> };
    Ok(expanded)
}
fn to_col_number(db: &dyn AstDatabase, file: HirFileId, pos: TextUnit) -> usize { fn to_col_number(db: &dyn AstDatabase, file: HirFileId, pos: TextUnit) -> usize {
// FIXME: Use expansion info // FIXME: Use expansion info
let file_id = file.original_file(db); let file_id = file.original_file(db);
@ -137,7 +166,7 @@ fn to_col_number(db: &dyn AstDatabase, file: HirFileId, pos: TextUnit) -> usize
if c == '\n' { if c == '\n' {
break; break;
} }
col_num = col_num + 1; col_num += 1;
} }
col_num col_num
} }
@ -148,12 +177,15 @@ fn column_expand(
_tt: &tt::Subtree, _tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> { ) -> Result<tt::Subtree, mbe::ExpandError> {
let loc = db.lookup_intern_macro(id); let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db); let macro_call = match loc.kind {
crate::MacroCallKind::FnLike(ast_id) => ast_id.to_node(db),
_ => panic!("column macro called as attr"),
};
let _arg = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?; let _arg = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
let col_start = macro_call.syntax().text_range().start(); let col_start = macro_call.syntax().text_range().start();
let file = id.as_file(MacroFileKind::Expr); let file = id.as_file();
let col_num = to_col_number(db, file, col_start); let col_num = to_col_number(db, file, col_start);
let expanded = quote! { let expanded = quote! {
@ -164,15 +196,10 @@ fn column_expand(
} }
fn file_expand( fn file_expand(
db: &dyn AstDatabase, _db: &dyn AstDatabase,
id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> { ) -> Result<tt::Subtree, mbe::ExpandError> {
let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db);
let _ = macro_call.token_tree().ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
// FIXME: RA purposefully lacks knowledge of absolute file names // FIXME: RA purposefully lacks knowledge of absolute file names
// so just return "". // so just return "".
let file_name = ""; let file_name = "";
@ -204,13 +231,56 @@ fn compile_error_expand(
Err(mbe::ExpandError::BindingError("Must be a string".into())) Err(mbe::ExpandError::BindingError("Must be a string".into()))
} }
/// Approximate expansion of `format_args!` (and `format_args_nl!`), good
/// enough for type inference; the format string itself is discarded.
fn format_args_expand(
    _db: &dyn AstDatabase,
    _id: MacroCallId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    // We expand `format_args!("", a1, a2)` to
    // ```
    // std::fmt::Arguments::new_v1(&[], &[
    //     std::fmt::ArgumentV1::new(&arg1,std::fmt::Display::fmt),
    //     std::fmt::ArgumentV1::new(&arg2,std::fmt::Display::fmt),
    // ])
    // ```,
    // which is still not really correct, but close enough for now
    // Split the input on top-level commas. Parenthesized groups are single
    // `Subtree` tokens, so commas inside calls like `f(a, b)` don't split.
    let mut args = Vec::new();
    let mut current = Vec::new();
    for tt in tt.token_trees.iter().cloned() {
        match tt {
            tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => {
                args.push(current);
                current = Vec::new();
            }
            _ => {
                current.push(tt);
            }
        }
    }
    if !current.is_empty() {
        args.push(current);
    }
    if args.is_empty() {
        return Err(mbe::ExpandError::NoMatchingRule);
    }
    // The first chunk is the format string; only the value arguments matter here.
    let _format_string = args.remove(0);
    let arg_tts = args.into_iter().flat_map(|arg| {
        quote! { std::fmt::ArgumentV1::new(&(##arg), std::fmt::Display::fmt), }
    }.token_trees).collect::<Vec<_>>();
    let expanded = quote! {
        std::fmt::Arguments::new_v1(&[], &[##arg_tts])
    };
    Ok(expanded)
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::{test_db::TestDB, MacroCallLoc}; use crate::{name::AsName, test_db::TestDB, MacroCallKind, MacroCallLoc};
use ra_db::{fixture::WithFixture, SourceDatabase}; use ra_db::{fixture::WithFixture, SourceDatabase};
use ra_syntax::ast::NameOwner;
fn expand_builtin_macro(s: &str, expander: BuiltinFnLikeExpander) -> String { fn expand_builtin_macro(s: &str) -> String {
let (db, file_id) = TestDB::with_single_file(&s); let (db, file_id) = TestDB::with_single_file(&s);
let parsed = db.parse(file_id); let parsed = db.parse(file_id);
let macro_calls: Vec<_> = let macro_calls: Vec<_> =
@ -218,20 +288,26 @@ mod tests {
let ast_id_map = db.ast_id_map(file_id.into()); let ast_id_map = db.ast_id_map(file_id.into());
let expander =
BuiltinFnLikeExpander::by_name(&macro_calls[0].name().unwrap().as_name()).unwrap();
// the first one should be a macro_rules // the first one should be a macro_rules
let def = MacroDefId { let def = MacroDefId {
krate: CrateId(0), krate: Some(CrateId(0)),
ast_id: AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[0])), ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[0]))),
kind: MacroDefKind::BuiltIn(expander), kind: MacroDefKind::BuiltIn(expander),
}; };
let loc = MacroCallLoc { let loc = MacroCallLoc {
def, def,
ast_id: AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[1])), kind: MacroCallKind::FnLike(AstId::new(
file_id.into(),
ast_id_map.ast_id(&macro_calls[1]),
)),
}; };
let id = db.intern_macro(loc); let id = db.intern_macro(loc);
let parsed = db.parse_or_expand(id.as_file(MacroFileKind::Expr)).unwrap(); let parsed = db.parse_or_expand(id.as_file()).unwrap();
parsed.text().to_string() parsed.text().to_string()
} }
@ -240,25 +316,23 @@ mod tests {
fn test_column_expand() { fn test_column_expand() {
let expanded = expand_builtin_macro( let expanded = expand_builtin_macro(
r#" r#"
#[rustc_builtin_macro] #[rustc_builtin_macro]
macro_rules! column {() => {}} macro_rules! column {() => {}}
column!() column!()
"#, "#,
BuiltinFnLikeExpander::Column,
); );
assert_eq!(expanded, "9"); assert_eq!(expanded, "13");
} }
#[test] #[test]
fn test_line_expand() { fn test_line_expand() {
let expanded = expand_builtin_macro( let expanded = expand_builtin_macro(
r#" r#"
#[rustc_builtin_macro] #[rustc_builtin_macro]
macro_rules! line {() => {}} macro_rules! line {() => {}}
line!() line!()
"#, "#,
BuiltinFnLikeExpander::Line,
); );
assert_eq!(expanded, "4"); assert_eq!(expanded, "4");
@ -268,25 +342,49 @@ mod tests {
fn test_stringify_expand() { fn test_stringify_expand() {
let expanded = expand_builtin_macro( let expanded = expand_builtin_macro(
r#" r#"
#[rustc_builtin_macro] #[rustc_builtin_macro]
macro_rules! stringify {() => {}} macro_rules! stringify {() => {}}
stringify!(a b c) stringify!(a b c)
"#, "#,
BuiltinFnLikeExpander::Stringify,
); );
assert_eq!(expanded, "\"a b c\""); assert_eq!(expanded, "\"a b c\"");
} }
#[test]
fn test_env_expand() {
let expanded = expand_builtin_macro(
r#"
#[rustc_builtin_macro]
macro_rules! env {() => {}}
env!("TEST_ENV_VAR")
"#,
);
assert_eq!(expanded, "\"\"");
}
#[test]
fn test_option_env_expand() {
let expanded = expand_builtin_macro(
r#"
#[rustc_builtin_macro]
macro_rules! option_env {() => {}}
option_env!("TEST_ENV_VAR")
"#,
);
assert_eq!(expanded, "std::option::Option::None:: <&str>");
}
#[test] #[test]
fn test_file_expand() { fn test_file_expand() {
let expanded = expand_builtin_macro( let expanded = expand_builtin_macro(
r#" r#"
#[rustc_builtin_macro] #[rustc_builtin_macro]
macro_rules! file {() => {}} macro_rules! file {() => {}}
file!() file!()
"#, "#,
BuiltinFnLikeExpander::File,
); );
assert_eq!(expanded, "\"\""); assert_eq!(expanded, "\"\"");
@ -296,16 +394,34 @@ mod tests {
fn test_compile_error_expand() { fn test_compile_error_expand() {
let expanded = expand_builtin_macro( let expanded = expand_builtin_macro(
r#" r#"
#[rustc_builtin_macro] #[rustc_builtin_macro]
macro_rules! compile_error { macro_rules! compile_error {
($msg:expr) => ({ /* compiler built-in */ }); ($msg:expr) => ({ /* compiler built-in */ });
($msg:expr,) => ({ /* compiler built-in */ }) ($msg:expr,) => ({ /* compiler built-in */ })
} }
compile_error!("error!"); compile_error!("error!");
"#, "#,
BuiltinFnLikeExpander::CompileError,
); );
assert_eq!(expanded, r#"loop{"error!"}"#); assert_eq!(expanded, r#"loop{"error!"}"#);
} }
#[test]
fn test_format_args_expand() {
let expanded = expand_builtin_macro(
r#"
#[rustc_builtin_macro]
macro_rules! format_args {
($fmt:expr) => ({ /* compiler built-in */ });
($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
}
format_args!("{} {:?}", arg1(a, b, c), arg2);
"#,
);
assert_eq!(
expanded,
r#"std::fmt::Arguments::new_v1(&[] ,&[std::fmt::ArgumentV1::new(&(arg1(a,b,c)),std::fmt::Display::fmt),std::fmt::ArgumentV1::new(&(arg2),std::fmt::Display::fmt),])"#
);
}
} }

View file

@ -6,17 +6,18 @@ use mbe::MacroRules;
use ra_db::{salsa, SourceDatabase}; use ra_db::{salsa, SourceDatabase};
use ra_parser::FragmentKind; use ra_parser::FragmentKind;
use ra_prof::profile; use ra_prof::profile;
use ra_syntax::{AstNode, Parse, SyntaxNode}; use ra_syntax::{AstNode, Parse, SyntaxKind::*, SyntaxNode};
use crate::{ use crate::{
ast_id_map::AstIdMap, BuiltinFnLikeExpander, HirFileId, HirFileIdRepr, MacroCallId, ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, HirFileId, HirFileIdRepr,
MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, MacroFileKind, MacroCallId, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile,
}; };
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander { pub enum TokenExpander {
MacroRules(mbe::MacroRules), MacroRules(mbe::MacroRules),
Builtin(BuiltinFnLikeExpander), Builtin(BuiltinFnLikeExpander),
BuiltinDerive(BuiltinDeriveExpander),
} }
impl TokenExpander { impl TokenExpander {
@ -29,6 +30,7 @@ impl TokenExpander {
match self { match self {
TokenExpander::MacroRules(it) => it.expand(tt), TokenExpander::MacroRules(it) => it.expand(tt),
TokenExpander::Builtin(it) => it.expand(db, id, tt), TokenExpander::Builtin(it) => it.expand(db, id, tt),
TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
} }
} }
@ -36,13 +38,15 @@ impl TokenExpander {
match self { match self {
TokenExpander::MacroRules(it) => it.map_id_down(id), TokenExpander::MacroRules(it) => it.map_id_down(id),
TokenExpander::Builtin(..) => id, TokenExpander::Builtin(..) => id,
TokenExpander::BuiltinDerive(..) => id,
} }
} }
pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
match self { match self {
TokenExpander::MacroRules(it) => it.map_id_up(id), TokenExpander::MacroRules(it) => it.map_id_up(id),
TokenExpander::Builtin(..) => (id, mbe::Origin::Def), TokenExpander::Builtin(..) => (id, mbe::Origin::Call),
TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call),
} }
} }
} }
@ -76,7 +80,7 @@ pub(crate) fn macro_def(
) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { ) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
match id.kind { match id.kind {
MacroDefKind::Declarative => { MacroDefKind::Declarative => {
let macro_call = id.ast_id.to_node(db); let macro_call = id.ast_id?.to_node(db);
let arg = macro_call.token_tree()?; let arg = macro_call.token_tree()?;
let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| { let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
log::warn!("fail on macro_def to token tree: {:#?}", arg); log::warn!("fail on macro_def to token tree: {:#?}", arg);
@ -89,7 +93,10 @@ pub(crate) fn macro_def(
Some(Arc::new((TokenExpander::MacroRules(rules), tmap))) Some(Arc::new((TokenExpander::MacroRules(rules), tmap)))
} }
MacroDefKind::BuiltIn(expander) => { MacroDefKind::BuiltIn(expander) => {
Some(Arc::new((TokenExpander::Builtin(expander.clone()), mbe::TokenMap::default()))) Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default())))
}
MacroDefKind::BuiltInDerive(expander) => {
Some(Arc::new((TokenExpander::BuiltinDerive(expander), mbe::TokenMap::default())))
} }
} }
} }
@ -99,9 +106,8 @@ pub(crate) fn macro_arg(
id: MacroCallId, id: MacroCallId,
) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { ) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
let loc = db.lookup_intern_macro(id); let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db); let arg = loc.kind.arg(db)?;
let arg = macro_call.token_tree()?; let (tt, tmap) = mbe::syntax_node_to_token_tree(&arg)?;
let (tt, tmap) = mbe::ast_to_token_tree(&arg)?;
Some(Arc::new((tt, tmap))) Some(Arc::new((tt, tmap)))
} }
@ -148,11 +154,43 @@ pub(crate) fn parse_macro(
}) })
.ok()?; .ok()?;
let fragment_kind = match macro_file.macro_file_kind { let fragment_kind = to_fragment_kind(db, macro_call_id);
MacroFileKind::Items => FragmentKind::Items,
MacroFileKind::Expr => FragmentKind::Expr,
MacroFileKind::Statements => FragmentKind::Statements,
};
let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?; let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?;
Some((parse, Arc::new(rev_token_map))) Some((parse, Arc::new(rev_token_map)))
} }
/// Given a `MacroCallId`, return what `FragmentKind` it belongs to.
/// FIXME: Not completed
fn to_fragment_kind(db: &dyn AstDatabase, macro_call_id: MacroCallId) -> FragmentKind {
let syn = db.lookup_intern_macro(macro_call_id).kind.node(db).value;
let parent = match syn.parent() {
Some(it) => it,
None => {
// FIXME:
// If it is root, which means the parent HirFile
// MacroKindFile must be non-items
// return expr now.
return FragmentKind::Expr;
}
};
match parent.kind() {
MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items,
LET_STMT => {
// FIXME: Handle Pattern
FragmentKind::Expr
}
// FIXME: Expand to statements in appropriate positions; HIR lowering needs to handle that
EXPR_STMT | BLOCK => FragmentKind::Expr,
ARG_LIST => FragmentKind::Expr,
TRY_EXPR => FragmentKind::Expr,
TUPLE_EXPR => FragmentKind::Expr,
ITEM_LIST => FragmentKind::Items,
_ => {
// Unknown , Just guess it is `Items`
FragmentKind::Items
}
}
}

View file

@ -18,11 +18,11 @@ use std::{any::Any, fmt};
use ra_syntax::{SyntaxNode, SyntaxNodePtr, TextRange}; use ra_syntax::{SyntaxNode, SyntaxNodePtr, TextRange};
use crate::{db::AstDatabase, Source}; use crate::{db::AstDatabase, InFile};
pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static { pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
fn message(&self) -> String; fn message(&self) -> String;
fn source(&self) -> Source<SyntaxNodePtr>; fn source(&self) -> InFile<SyntaxNodePtr>;
fn highlight_range(&self) -> TextRange { fn highlight_range(&self) -> TextRange {
self.source().value.range() self.source().value.range()
} }

View file

@ -1,54 +0,0 @@
//! FIXME: write short doc here
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Either<A, B> {
A(A),
B(B),
}
impl<A, B> Either<A, B> {
pub fn either<R, F1, F2>(self, f1: F1, f2: F2) -> R
where
F1: FnOnce(A) -> R,
F2: FnOnce(B) -> R,
{
match self {
Either::A(a) => f1(a),
Either::B(b) => f2(b),
}
}
pub fn map<U, V, F1, F2>(self, f1: F1, f2: F2) -> Either<U, V>
where
F1: FnOnce(A) -> U,
F2: FnOnce(B) -> V,
{
match self {
Either::A(a) => Either::A(f1(a)),
Either::B(b) => Either::B(f2(b)),
}
}
pub fn map_a<U, F>(self, f: F) -> Either<U, B>
where
F: FnOnce(A) -> U,
{
self.map(f, |it| it)
}
pub fn a(self) -> Option<A> {
match self {
Either::A(it) => Some(it),
Either::B(_) => None,
}
}
pub fn b(self) -> Option<B> {
match self {
Either::A(_) => None,
Either::B(it) => Some(it),
}
}
pub fn as_ref(&self) -> Either<&A, &B> {
match self {
Either::A(it) => Either::A(it),
Either::B(it) => Either::B(it),
}
}
}

View file

@ -2,12 +2,12 @@
//! //!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`. //! this moment, this is horribly incomplete and handles only `$crate`.
use either::Either;
use ra_db::CrateId; use ra_db::CrateId;
use ra_syntax::ast; use ra_syntax::ast;
use crate::{ use crate::{
db::AstDatabase, db::AstDatabase,
either::Either,
name::{AsName, Name}, name::{AsName, Name},
HirFileId, HirFileIdRepr, MacroDefKind, HirFileId, HirFileIdRepr, MacroDefKind,
}; };
@ -25,8 +25,9 @@ impl Hygiene {
HirFileIdRepr::MacroFile(macro_file) => { HirFileIdRepr::MacroFile(macro_file) => {
let loc = db.lookup_intern_macro(macro_file.macro_call_id); let loc = db.lookup_intern_macro(macro_file.macro_call_id);
match loc.def.kind { match loc.def.kind {
MacroDefKind::Declarative => Some(loc.def.krate), MacroDefKind::Declarative => loc.def.krate,
MacroDefKind::BuiltIn(_) => None, MacroDefKind::BuiltIn(_) => None,
MacroDefKind::BuiltInDerive(_) => None,
} }
} }
}; };
@ -41,9 +42,9 @@ impl Hygiene {
pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either<Name, CrateId> { pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either<Name, CrateId> {
if let Some(def_crate) = self.def_crate { if let Some(def_crate) = self.def_crate {
if name_ref.text() == "$crate" { if name_ref.text() == "$crate" {
return Either::B(def_crate); return Either::Right(def_crate);
} }
} }
Either::A(name_ref.as_name()) Either::Left(name_ref.as_name())
} }
} }

View file

@ -6,14 +6,14 @@
pub mod db; pub mod db;
pub mod ast_id_map; pub mod ast_id_map;
pub mod either;
pub mod name; pub mod name;
pub mod hygiene; pub mod hygiene;
pub mod diagnostics; pub mod diagnostics;
pub mod builtin_derive;
pub mod builtin_macro; pub mod builtin_macro;
pub mod quote; pub mod quote;
use std::hash::{Hash, Hasher}; use std::hash::Hash;
use std::sync::Arc; use std::sync::Arc;
use ra_db::{salsa, CrateId, FileId}; use ra_db::{salsa, CrateId, FileId};
@ -24,6 +24,7 @@ use ra_syntax::{
}; };
use crate::ast_id_map::FileAstId; use crate::ast_id_map::FileAstId;
use crate::builtin_derive::BuiltinDeriveExpander;
use crate::builtin_macro::BuiltinFnLikeExpander; use crate::builtin_macro::BuiltinFnLikeExpander;
#[cfg(test)] #[cfg(test)]
@ -70,7 +71,18 @@ impl HirFileId {
HirFileIdRepr::FileId(file_id) => file_id, HirFileIdRepr::FileId(file_id) => file_id,
HirFileIdRepr::MacroFile(macro_file) => { HirFileIdRepr::MacroFile(macro_file) => {
let loc = db.lookup_intern_macro(macro_file.macro_call_id); let loc = db.lookup_intern_macro(macro_file.macro_call_id);
loc.ast_id.file_id().original_file(db) loc.kind.file_id().original_file(db)
}
}
}
/// If this is a macro call, returns the syntax node of the call.
pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
match self.0 {
HirFileIdRepr::FileId(_) => None,
HirFileIdRepr::MacroFile(macro_file) => {
let loc = db.lookup_intern_macro(macro_file.macro_call_id);
Some(loc.kind.node(db))
} }
} }
} }
@ -82,17 +94,17 @@ impl HirFileId {
HirFileIdRepr::MacroFile(macro_file) => { HirFileIdRepr::MacroFile(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id); let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);
let arg_tt = loc.ast_id.to_node(db).token_tree()?; let arg_tt = loc.kind.arg(db)?;
let def_tt = loc.def.ast_id.to_node(db).token_tree()?; let def_tt = loc.def.ast_id?.to_node(db).token_tree()?;
let macro_def = db.macro_def(loc.def)?; let macro_def = db.macro_def(loc.def)?;
let (parse, exp_map) = db.parse_macro(macro_file)?; let (parse, exp_map) = db.parse_macro(macro_file)?;
let macro_arg = db.macro_arg(macro_file.macro_call_id)?; let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
Some(ExpansionInfo { Some(ExpansionInfo {
expanded: Source::new(self, parse.syntax_node()), expanded: InFile::new(self, parse.syntax_node()),
arg: Source::new(loc.ast_id.file_id, arg_tt), arg: InFile::new(loc.kind.file_id(), arg_tt),
def: Source::new(loc.ast_id.file_id, def_tt), def: InFile::new(loc.def.ast_id?.file_id, def_tt),
macro_arg, macro_arg,
macro_def, macro_def,
exp_map, exp_map,
@ -105,14 +117,6 @@ impl HirFileId {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFile { pub struct MacroFile {
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
macro_file_kind: MacroFileKind,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroFileKind {
Items,
Expr,
Statements,
} }
/// `MacroCallId` identifies a particular macro invocation, like /// `MacroCallId` identifies a particular macro invocation, like
@ -130,18 +134,20 @@ impl salsa::InternKey for MacroCallId {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId { pub struct MacroDefId {
pub krate: CrateId, // FIXME: krate and ast_id are currently optional because we don't have a
pub ast_id: AstId<ast::MacroCall>, // definition location for built-in derives. There is one, though: the
// standard library defines them. The problem is that it uses the new
// `macro` syntax for this, which we don't support yet. As soon as we do
// (which will probably require touching this code), we can instead use
// that (and also remove the hacks for resolving built-in derives).
pub krate: Option<CrateId>,
pub ast_id: Option<AstId<ast::MacroCall>>,
pub kind: MacroDefKind, pub kind: MacroDefKind,
} }
impl MacroDefId { impl MacroDefId {
pub fn as_call_id( pub fn as_call_id(self, db: &dyn db::AstDatabase, kind: MacroCallKind) -> MacroCallId {
self, db.intern_macro(MacroCallLoc { def: self, kind })
db: &dyn db::AstDatabase,
ast_id: AstId<ast::MacroCall>,
) -> MacroCallId {
db.intern_macro(MacroCallLoc { def: self, ast_id })
} }
} }
@ -149,64 +155,103 @@ impl MacroDefId {
pub enum MacroDefKind { pub enum MacroDefKind {
Declarative, Declarative,
BuiltIn(BuiltinFnLikeExpander), BuiltIn(BuiltinFnLikeExpander),
// FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander
BuiltInDerive(BuiltinDeriveExpander),
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc { pub struct MacroCallLoc {
pub(crate) def: MacroDefId, pub(crate) def: MacroDefId,
pub(crate) ast_id: AstId<ast::MacroCall>, pub(crate) kind: MacroCallKind,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum MacroCallKind {
FnLike(AstId<ast::MacroCall>),
Attr(AstId<ast::ModuleItem>),
}
impl MacroCallKind {
pub fn file_id(&self) -> HirFileId {
match self {
MacroCallKind::FnLike(ast_id) => ast_id.file_id,
MacroCallKind::Attr(ast_id) => ast_id.file_id,
}
}
pub fn node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> {
match self {
MacroCallKind::FnLike(ast_id) => ast_id.with_value(ast_id.to_node(db).syntax().clone()),
MacroCallKind::Attr(ast_id) => ast_id.with_value(ast_id.to_node(db).syntax().clone()),
}
}
pub fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> {
match self {
MacroCallKind::FnLike(ast_id) => {
Some(ast_id.to_node(db).token_tree()?.syntax().clone())
}
MacroCallKind::Attr(ast_id) => Some(ast_id.to_node(db).syntax().clone()),
}
}
} }
impl MacroCallId { impl MacroCallId {
pub fn as_file(self, kind: MacroFileKind) -> HirFileId { pub fn as_file(self) -> HirFileId {
let macro_file = MacroFile { macro_call_id: self, macro_file_kind: kind }; MacroFile { macro_call_id: self }.into()
macro_file.into()
} }
} }
/// ExpansionInfo mainly describes how to map text range between src and expanded macro /// ExpansionInfo mainly describes how to map text range between src and expanded macro
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo { pub struct ExpansionInfo {
expanded: Source<SyntaxNode>, expanded: InFile<SyntaxNode>,
arg: Source<ast::TokenTree>, arg: InFile<SyntaxNode>,
def: Source<ast::TokenTree>, def: InFile<ast::TokenTree>,
macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>, macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
exp_map: Arc<mbe::TokenMap>, exp_map: Arc<mbe::TokenMap>,
} }
pub use mbe::Origin;
impl ExpansionInfo { impl ExpansionInfo {
pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> { pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
Some(self.arg.with_value(self.arg.value.parent()?))
}
pub fn map_token_down(&self, token: InFile<&SyntaxToken>) -> Option<InFile<SyntaxToken>> {
assert_eq!(token.file_id, self.arg.file_id); assert_eq!(token.file_id, self.arg.file_id);
let range = let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
token.value.text_range().checked_sub(self.arg.value.syntax().text_range().start())?;
let token_id = self.macro_arg.1.token_by_range(range)?; let token_id = self.macro_arg.1.token_by_range(range)?;
let token_id = self.macro_def.0.map_id_down(token_id); let token_id = self.macro_def.0.map_id_down(token_id);
let range = self.exp_map.range_by_token(token_id)?; let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
let token = algo::find_covering_element(&self.expanded.value, range).into_token()?; let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;
Some(self.expanded.with_value(token)) Some(self.expanded.with_value(token))
} }
pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> { pub fn map_token_up(
&self,
token: InFile<&SyntaxToken>,
) -> Option<(InFile<SyntaxToken>, Origin)> {
let token_id = self.exp_map.token_by_range(token.value.text_range())?; let token_id = self.exp_map.token_by_range(token.value.text_range())?;
let (token_id, origin) = self.macro_def.0.map_id_up(token_id); let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
let (token_map, tt) = match origin { let (token_map, tt) = match origin {
mbe::Origin::Call => (&self.macro_arg.1, &self.arg), mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
mbe::Origin::Def => (&self.macro_def.1, &self.def), mbe::Origin::Def => {
(&self.macro_def.1, self.def.as_ref().map(|tt| tt.syntax().clone()))
}
}; };
let range = token_map.range_by_token(token_id)?; let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
let token = algo::find_covering_element( let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start())
tt.value.syntax(), .into_token()?;
range + tt.value.syntax().text_range().start(), Some((tt.with_value(token), origin))
)
.into_token()?;
Some(tt.with_value(token))
} }
} }
@ -214,76 +259,66 @@ impl ExpansionInfo {
/// ///
/// It is stable across reparses, and can be used as salsa key/value. /// It is stable across reparses, and can be used as salsa key/value.
// FIXME: isn't this just a `Source<FileAstId<N>>` ? // FIXME: isn't this just a `Source<FileAstId<N>>` ?
#[derive(Debug)] pub type AstId<N> = InFile<FileAstId<N>>;
pub struct AstId<N: AstNode> {
file_id: HirFileId,
file_ast_id: FileAstId<N>,
}
impl<N: AstNode> Clone for AstId<N> {
fn clone(&self) -> AstId<N> {
*self
}
}
impl<N: AstNode> Copy for AstId<N> {}
impl<N: AstNode> PartialEq for AstId<N> {
fn eq(&self, other: &Self) -> bool {
(self.file_id, self.file_ast_id) == (other.file_id, other.file_ast_id)
}
}
impl<N: AstNode> Eq for AstId<N> {}
impl<N: AstNode> Hash for AstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
(self.file_id, self.file_ast_id).hash(hasher);
}
}
impl<N: AstNode> AstId<N> { impl<N: AstNode> AstId<N> {
pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
AstId { file_id, file_ast_id }
}
pub fn file_id(&self) -> HirFileId {
self.file_id
}
pub fn to_node(&self, db: &dyn db::AstDatabase) -> N { pub fn to_node(&self, db: &dyn db::AstDatabase) -> N {
let root = db.parse_or_expand(self.file_id).unwrap(); let root = db.parse_or_expand(self.file_id).unwrap();
db.ast_id_map(self.file_id).get(self.file_ast_id).to_node(&root) db.ast_id_map(self.file_id).get(self.value).to_node(&root)
} }
} }
/// `Source<T>` stores a value of `T` inside a particular file/syntax tree. /// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
/// ///
/// Typical usages are: /// Typical usages are:
/// ///
/// * `Source<SyntaxNode>` -- syntax node in a file /// * `InFile<SyntaxNode>` -- syntax node in a file
/// * `Source<ast::FnDef>` -- ast node in a file /// * `InFile<ast::FnDef>` -- ast node in a file
/// * `Source<TextUnit>` -- offset in a file /// * `InFile<TextUnit>` -- offset in a file
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct Source<T> { pub struct InFile<T> {
pub file_id: HirFileId, pub file_id: HirFileId,
pub value: T, pub value: T,
} }
impl<T> Source<T> { impl<T> InFile<T> {
pub fn new(file_id: HirFileId, value: T) -> Source<T> { pub fn new(file_id: HirFileId, value: T) -> InFile<T> {
Source { file_id, value } InFile { file_id, value }
} }
// Similarly, naming here is stupid... // Similarly, naming here is stupid...
pub fn with_value<U>(&self, value: U) -> Source<U> { pub fn with_value<U>(&self, value: U) -> InFile<U> {
Source::new(self.file_id, value) InFile::new(self.file_id, value)
} }
pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> { pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFile<U> {
Source::new(self.file_id, f(self.value)) InFile::new(self.file_id, f(self.value))
} }
pub fn as_ref(&self) -> Source<&T> { pub fn as_ref(&self) -> InFile<&T> {
self.with_value(&self.value) self.with_value(&self.value)
} }
pub fn file_syntax(&self, db: &impl db::AstDatabase) -> SyntaxNode { pub fn file_syntax(&self, db: &impl db::AstDatabase) -> SyntaxNode {
db.parse_or_expand(self.file_id).expect("source created from invalid file") db.parse_or_expand(self.file_id).expect("source created from invalid file")
} }
} }
impl<T: Clone> InFile<&T> {
pub fn cloned(&self) -> InFile<T> {
self.with_value(self.value.clone())
}
}
impl InFile<SyntaxNode> {
pub fn ancestors_with_macros<'a>(
self,
db: &'a impl crate::db::AstDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + 'a {
std::iter::successors(Some(self), move |node| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let parent_node = node.file_id.call_node(db)?;
Some(parent_node)
}
})
}
}

View file

@ -38,8 +38,8 @@ impl Name {
} }
/// Shortcut to create inline plain text name /// Shortcut to create inline plain text name
const fn new_inline_ascii(len: usize, text: &[u8]) -> Name { const fn new_inline_ascii(text: &[u8]) -> Name {
Name::new_text(SmolStr::new_inline_from_ascii(len, text)) Name::new_text(SmolStr::new_inline_from_ascii(text.len(), text))
} }
/// Resolve a name from the text of token. /// Resolve a name from the text of token.
@ -83,6 +83,12 @@ impl AsName for ast::Name {
} }
} }
impl AsName for tt::Ident {
fn as_name(&self) -> Name {
Name::resolve(&self.text)
}
}
impl AsName for ast::FieldKind { impl AsName for ast::FieldKind {
fn as_name(&self) -> Name { fn as_name(&self) -> Name {
match self { match self {
@ -98,52 +104,102 @@ impl AsName for ra_db::Dependency {
} }
} }
// Primitives pub mod known {
pub const ISIZE: Name = Name::new_inline_ascii(5, b"isize"); macro_rules! known_names {
pub const I8: Name = Name::new_inline_ascii(2, b"i8"); ($($ident:ident),* $(,)?) => {
pub const I16: Name = Name::new_inline_ascii(3, b"i16"); $(
pub const I32: Name = Name::new_inline_ascii(3, b"i32"); #[allow(bad_style)]
pub const I64: Name = Name::new_inline_ascii(3, b"i64"); pub const $ident: super::Name =
pub const I128: Name = Name::new_inline_ascii(4, b"i128"); super::Name::new_inline_ascii(stringify!($ident).as_bytes());
pub const USIZE: Name = Name::new_inline_ascii(5, b"usize"); )*
pub const U8: Name = Name::new_inline_ascii(2, b"u8"); };
pub const U16: Name = Name::new_inline_ascii(3, b"u16"); }
pub const U32: Name = Name::new_inline_ascii(3, b"u32");
pub const U64: Name = Name::new_inline_ascii(3, b"u64");
pub const U128: Name = Name::new_inline_ascii(4, b"u128");
pub const F32: Name = Name::new_inline_ascii(3, b"f32");
pub const F64: Name = Name::new_inline_ascii(3, b"f64");
pub const BOOL: Name = Name::new_inline_ascii(4, b"bool");
pub const CHAR: Name = Name::new_inline_ascii(4, b"char");
pub const STR: Name = Name::new_inline_ascii(3, b"str");
// Special names known_names!(
pub const SELF_PARAM: Name = Name::new_inline_ascii(4, b"self"); // Primitives
pub const SELF_TYPE: Name = Name::new_inline_ascii(4, b"Self"); isize,
pub const MACRO_RULES: Name = Name::new_inline_ascii(11, b"macro_rules"); i8,
i16,
i32,
i64,
i128,
usize,
u8,
u16,
u32,
u64,
u128,
f32,
f64,
bool,
char,
str,
// Special names
macro_rules,
// Components of known path (value or mod name)
std,
iter,
ops,
future,
result,
boxed,
// Components of known path (type name)
IntoIterator,
Item,
Try,
Ok,
Future,
Result,
Output,
Target,
Box,
RangeFrom,
RangeFull,
RangeInclusive,
RangeToInclusive,
RangeTo,
Range,
Neg,
Not,
Index,
// Builtin macros
file,
column,
compile_error,
line,
stringify,
format_args,
format_args_nl,
env,
option_env,
// Builtin derives
Copy,
Clone,
Default,
Debug,
Hash,
Ord,
PartialOrd,
Eq,
PartialEq,
);
// Components of known path (value or mod name) // self/Self cannot be used as an identifier
pub const STD: Name = Name::new_inline_ascii(3, b"std"); pub const SELF_PARAM: super::Name = super::Name::new_inline_ascii(b"self");
pub const ITER: Name = Name::new_inline_ascii(4, b"iter"); pub const SELF_TYPE: super::Name = super::Name::new_inline_ascii(b"Self");
pub const OPS: Name = Name::new_inline_ascii(3, b"ops");
pub const FUTURE: Name = Name::new_inline_ascii(6, b"future");
pub const RESULT: Name = Name::new_inline_ascii(6, b"result");
pub const BOXED: Name = Name::new_inline_ascii(5, b"boxed");
// Components of known path (type name) #[macro_export]
pub const INTO_ITERATOR_TYPE: Name = Name::new_inline_ascii(12, b"IntoIterator"); macro_rules! name {
pub const ITEM_TYPE: Name = Name::new_inline_ascii(4, b"Item"); (self) => {
pub const TRY_TYPE: Name = Name::new_inline_ascii(3, b"Try"); $crate::name::known::SELF_PARAM
pub const OK_TYPE: Name = Name::new_inline_ascii(2, b"Ok"); };
pub const FUTURE_TYPE: Name = Name::new_inline_ascii(6, b"Future"); (Self) => {
pub const RESULT_TYPE: Name = Name::new_inline_ascii(6, b"Result"); $crate::name::known::SELF_TYPE
pub const OUTPUT_TYPE: Name = Name::new_inline_ascii(6, b"Output"); };
pub const TARGET_TYPE: Name = Name::new_inline_ascii(6, b"Target"); ($ident:ident) => {
pub const BOX_TYPE: Name = Name::new_inline_ascii(3, b"Box"); $crate::name::known::$ident
};
}
}
// Builtin Macros pub use crate::name;
pub const FILE_MACRO: Name = Name::new_inline_ascii(4, b"file");
pub const COLUMN_MACRO: Name = Name::new_inline_ascii(6, b"column");
pub const COMPILE_ERROR_MACRO: Name = Name::new_inline_ascii(13, b"compile_error");
pub const LINE_MACRO: Name = Name::new_inline_ascii(4, b"line");
pub const STRINGIFY_MACRO: Name = Name::new_inline_ascii(9, b"stringify");

View file

@ -16,7 +16,10 @@ macro_rules! __quote {
{ {
let children = $crate::__quote!($($tt)*); let children = $crate::__quote!($($tt)*);
let subtree = tt::Subtree { let subtree = tt::Subtree {
delimiter: tt::Delimiter::$delim, delimiter: Some(tt::Delimiter {
kind: tt::DelimiterKind::$delim,
id: tt::TokenId::unspecified(),
}),
token_trees: $crate::quote::IntoTt::to_tokens(children), token_trees: $crate::quote::IntoTt::to_tokens(children),
}; };
subtree subtree
@ -29,6 +32,7 @@ macro_rules! __quote {
tt::Leaf::Punct(tt::Punct { tt::Leaf::Punct(tt::Punct {
char: $first, char: $first,
spacing: tt::Spacing::Alone, spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
}).into() }).into()
] ]
} }
@ -40,10 +44,12 @@ macro_rules! __quote {
tt::Leaf::Punct(tt::Punct { tt::Leaf::Punct(tt::Punct {
char: $first, char: $first,
spacing: tt::Spacing::Joint, spacing: tt::Spacing::Joint,
id: tt::TokenId::unspecified(),
}).into(), }).into(),
tt::Leaf::Punct(tt::Punct { tt::Leaf::Punct(tt::Punct {
char: $sec, char: $sec,
spacing: tt::Spacing::Alone, spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
}).into() }).into()
] ]
} }
@ -60,6 +66,15 @@ macro_rules! __quote {
} }
}; };
( ## $first:ident $($tail:tt)* ) => {
{
let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<tt::TokenTree>>();
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
// Brace // Brace
( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) }; ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
// Bracket // Bracket
@ -85,7 +100,10 @@ macro_rules! __quote {
( & ) => {$crate::__quote!(@PUNCT '&')}; ( & ) => {$crate::__quote!(@PUNCT '&')};
( , ) => {$crate::__quote!(@PUNCT ',')}; ( , ) => {$crate::__quote!(@PUNCT ',')};
( : ) => {$crate::__quote!(@PUNCT ':')}; ( : ) => {$crate::__quote!(@PUNCT ':')};
( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
( . ) => {$crate::__quote!(@PUNCT '.')}; ( . ) => {$crate::__quote!(@PUNCT '.')};
( < ) => {$crate::__quote!(@PUNCT '<')};
( > ) => {$crate::__quote!(@PUNCT '>')};
( $first:tt $($tail:tt)+ ) => { ( $first:tt $($tail:tt)+ ) => {
{ {
@ -114,7 +132,7 @@ pub(crate) trait IntoTt {
impl IntoTt for Vec<tt::TokenTree> { impl IntoTt for Vec<tt::TokenTree> {
fn to_subtree(self) -> tt::Subtree { fn to_subtree(self) -> tt::Subtree {
tt::Subtree { delimiter: tt::Delimiter::None, token_trees: self } tt::Subtree { delimiter: None, token_trees: self }
} }
fn to_tokens(self) -> Vec<tt::TokenTree> { fn to_tokens(self) -> Vec<tt::TokenTree> {
@ -169,15 +187,15 @@ macro_rules! impl_to_to_tokentrees {
} }
impl_to_to_tokentrees! { impl_to_to_tokentrees! {
u32 => self { tt::Literal{text: self.to_string().into()} }; u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
usize => self { tt::Literal{text: self.to_string().into()}}; usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
i32 => self { tt::Literal{text: self.to_string().into()}}; i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
tt::Leaf => self { self }; tt::Leaf => self { self };
tt::Literal => self { self }; tt::Literal => self { self };
tt::Ident => self { self }; tt::Ident => self { self };
tt::Punct => self { self }; tt::Punct => self { self };
&str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}}; &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}};
String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}} String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}
} }
#[cfg(test)] #[cfg(test)]
@ -244,7 +262,13 @@ mod tests {
let fields = let fields =
fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees.clone()).flatten(); fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees.clone()).flatten();
let list = tt::Subtree { delimiter: tt::Delimiter::Brace, token_trees: fields.collect() }; let list = tt::Subtree {
delimiter: Some(tt::Delimiter {
kind: tt::DelimiterKind::Brace,
id: tt::TokenId::unspecified(),
}),
token_trees: fields.collect(),
};
let quoted = quote! { let quoted = quote! {
impl Clone for #struct_name { impl Clone for #struct_name {

View file

@ -21,10 +21,9 @@ ra_prof = { path = "../ra_prof" }
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
test_utils = { path = "../test_utils" } test_utils = { path = "../test_utils" }
# https://github.com/rust-lang/chalk/pull/294 chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" }
chalk-solve = { git = "https://github.com/jackh726/chalk.git", rev = "095cd38a4f16337913bba487f2055b9ca0179f30" } chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" }
chalk-rust-ir = { git = "https://github.com/jackh726/chalk.git", rev = "095cd38a4f16337913bba487f2055b9ca0179f30" } chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" }
chalk-ir = { git = "https://github.com/jackh726/chalk.git", rev = "095cd38a4f16337913bba487f2055b9ca0179f30" }
lalrpop-intern = "0.15.1" lalrpop-intern = "0.15.1"

View file

@ -6,14 +6,14 @@
use std::iter::successors; use std::iter::successors;
use hir_def::lang_item::LangItemTarget; use hir_def::lang_item::LangItemTarget;
use hir_expand::name; use hir_expand::name::name;
use log::{info, warn}; use log::{info, warn};
use ra_db::CrateId; use ra_db::CrateId;
use crate::db::HirDatabase; use crate::{
db::HirDatabase,
use super::{
traits::{InEnvironment, Solution}, traits::{InEnvironment, Solution},
utils::generics,
Canonical, Substs, Ty, TypeWalk, Canonical, Substs, Ty, TypeWalk,
}; };
@ -48,14 +48,14 @@ fn deref_by_trait(
krate: CrateId, krate: CrateId,
ty: InEnvironment<&Canonical<Ty>>, ty: InEnvironment<&Canonical<Ty>>,
) -> Option<Canonical<Ty>> { ) -> Option<Canonical<Ty>> {
let deref_trait = match db.lang_item(krate.into(), "deref".into())? { let deref_trait = match db.lang_item(krate, "deref".into())? {
LangItemTarget::TraitId(it) => it, LangItemTarget::TraitId(it) => it,
_ => return None, _ => return None,
}; };
let target = db.trait_data(deref_trait).associated_type_by_name(&name::TARGET_TYPE)?; let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
let generic_params = db.generic_params(target.into()); let generic_params = generics(db, target.into());
if generic_params.count_params_including_parent() != 1 { if generic_params.len() != 1 {
// the Target type + Deref trait should only have one generic parameter, // the Target type + Deref trait should only have one generic parameter,
// namely Deref's Self type // namely Deref's Self type
return None; return None;
@ -78,7 +78,7 @@ fn deref_by_trait(
let canonical = super::Canonical { num_vars: 1 + ty.value.num_vars, value: in_env }; let canonical = super::Canonical { num_vars: 1 + ty.value.num_vars, value: in_env };
let solution = db.trait_solve(krate.into(), canonical)?; let solution = db.trait_solve(krate, canonical)?;
match &solution { match &solution {
Solution::Unique(vars) => { Solution::Unique(vars) => {

View file

@ -10,8 +10,8 @@ use ra_db::{salsa, CrateId};
use crate::{ use crate::{
method_resolution::CrateImplBlocks, method_resolution::CrateImplBlocks,
traits::{AssocTyValue, Impl}, traits::{chalk, AssocTyValue, Impl},
CallableDef, FnSig, GenericPredicate, ImplTy, InferenceResult, Substs, Ty, TyDefId, TypeCtor, CallableDef, FnSig, GenericPredicate, InferenceResult, Substs, TraitRef, Ty, TyDefId, TypeCtor,
ValueTyDefId, ValueTyDefId,
}; };
@ -22,13 +22,18 @@ pub trait HirDatabase: DefDatabase {
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>; fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
#[salsa::invoke(crate::lower::ty_query)] #[salsa::invoke(crate::lower::ty_query)]
#[salsa::cycle(crate::lower::ty_recover)]
fn ty(&self, def: TyDefId) -> Ty; fn ty(&self, def: TyDefId) -> Ty;
#[salsa::invoke(crate::lower::value_ty_query)] #[salsa::invoke(crate::lower::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Ty; fn value_ty(&self, def: ValueTyDefId) -> Ty;
#[salsa::invoke(crate::lower::impl_ty_query)] #[salsa::invoke(crate::lower::impl_self_ty_query)]
fn impl_ty(&self, def: ImplId) -> ImplTy; #[salsa::cycle(crate::lower::impl_self_ty_recover)]
fn impl_self_ty(&self, def: ImplId) -> Ty;
#[salsa::invoke(crate::lower::impl_trait_query)]
fn impl_trait(&self, def: ImplId) -> Option<TraitRef>;
#[salsa::invoke(crate::lower::field_types_query)] #[salsa::invoke(crate::lower::field_types_query)]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Ty>>; fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Ty>>;
@ -37,6 +42,7 @@ pub trait HirDatabase: DefDatabase {
fn callable_item_signature(&self, def: CallableDef) -> FnSig; fn callable_item_signature(&self, def: CallableDef) -> FnSig;
#[salsa::invoke(crate::lower::generic_predicates_for_param_query)] #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
#[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
fn generic_predicates_for_param( fn generic_predicates_for_param(
&self, &self,
def: GenericDefId, def: GenericDefId,
@ -71,39 +77,24 @@ pub trait HirDatabase: DefDatabase {
#[salsa::interned] #[salsa::interned]
fn intern_assoc_ty_value(&self, assoc_ty_value: AssocTyValue) -> crate::traits::AssocTyValueId; fn intern_assoc_ty_value(&self, assoc_ty_value: AssocTyValue) -> crate::traits::AssocTyValueId;
#[salsa::invoke(crate::traits::chalk::associated_ty_data_query)] #[salsa::invoke(chalk::associated_ty_data_query)]
fn associated_ty_data( fn associated_ty_data(&self, id: chalk::AssocTypeId) -> Arc<chalk::AssociatedTyDatum>;
&self,
id: chalk_ir::TypeId,
) -> Arc<chalk_rust_ir::AssociatedTyDatum<chalk_ir::family::ChalkIr>>;
#[salsa::invoke(crate::traits::chalk::trait_datum_query)] #[salsa::invoke(chalk::trait_datum_query)]
fn trait_datum( fn trait_datum(&self, krate: CrateId, trait_id: chalk::TraitId) -> Arc<chalk::TraitDatum>;
&self,
krate: CrateId,
trait_id: chalk_ir::TraitId,
) -> Arc<chalk_rust_ir::TraitDatum<chalk_ir::family::ChalkIr>>;
#[salsa::invoke(crate::traits::chalk::struct_datum_query)] #[salsa::invoke(chalk::struct_datum_query)]
fn struct_datum( fn struct_datum(&self, krate: CrateId, struct_id: chalk::StructId) -> Arc<chalk::StructDatum>;
&self,
krate: CrateId,
struct_id: chalk_ir::StructId,
) -> Arc<chalk_rust_ir::StructDatum<chalk_ir::family::ChalkIr>>;
#[salsa::invoke(crate::traits::chalk::impl_datum_query)] #[salsa::invoke(crate::traits::chalk::impl_datum_query)]
fn impl_datum( fn impl_datum(&self, krate: CrateId, impl_id: chalk::ImplId) -> Arc<chalk::ImplDatum>;
&self,
krate: CrateId,
impl_id: chalk_ir::ImplId,
) -> Arc<chalk_rust_ir::ImplDatum<chalk_ir::family::ChalkIr>>;
#[salsa::invoke(crate::traits::chalk::associated_ty_value_query)] #[salsa::invoke(crate::traits::chalk::associated_ty_value_query)]
fn associated_ty_value( fn associated_ty_value(
&self, &self,
krate: CrateId, krate: CrateId,
id: chalk_rust_ir::AssociatedTyValueId, id: chalk::AssociatedTyValueId,
) -> Arc<chalk_rust_ir::AssociatedTyValue<chalk_ir::family::ChalkIr>>; ) -> Arc<chalk::AssociatedTyValue>;
#[salsa::invoke(crate::traits::trait_solve_query)] #[salsa::invoke(crate::traits::trait_solve_query)]
fn trait_solve( fn trait_solve(

View file

@ -2,7 +2,7 @@
use std::any::Any; use std::any::Any;
use hir_expand::{db::AstDatabase, name::Name, HirFileId, Source}; use hir_expand::{db::AstDatabase, name::Name, HirFileId, InFile};
use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr}; use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};
pub use hir_def::diagnostics::UnresolvedModule; pub use hir_def::diagnostics::UnresolvedModule;
@ -19,8 +19,8 @@ impl Diagnostic for NoSuchField {
"no such field".to_string() "no such field".to_string()
} }
fn source(&self) -> Source<SyntaxNodePtr> { fn source(&self) -> InFile<SyntaxNodePtr> {
Source { file_id: self.file, value: self.field.into() } InFile { file_id: self.file, value: self.field.into() }
} }
fn as_any(&self) -> &(dyn Any + Send + 'static) { fn as_any(&self) -> &(dyn Any + Send + 'static) {
@ -44,8 +44,8 @@ impl Diagnostic for MissingFields {
} }
message message
} }
fn source(&self) -> Source<SyntaxNodePtr> { fn source(&self) -> InFile<SyntaxNodePtr> {
Source { file_id: self.file, value: self.field_list.into() } InFile { file_id: self.file, value: self.field_list.into() }
} }
fn as_any(&self) -> &(dyn Any + Send + 'static) { fn as_any(&self) -> &(dyn Any + Send + 'static) {
self self
@ -72,8 +72,8 @@ impl Diagnostic for MissingOkInTailExpr {
fn message(&self) -> String { fn message(&self) -> String {
"wrap return expression in Ok".to_string() "wrap return expression in Ok".to_string()
} }
fn source(&self) -> Source<SyntaxNodePtr> { fn source(&self) -> InFile<SyntaxNodePtr> {
Source { file_id: self.file, value: self.expr.into() } InFile { file_id: self.file, value: self.expr.into() }
} }
fn as_any(&self) -> &(dyn Any + Send + 'static) { fn as_any(&self) -> &(dyn Any + Send + 'static) {
self self

View file

@ -10,6 +10,7 @@ pub struct HirFormatter<'a, 'b, DB> {
buf: String, buf: String,
curr_size: usize, curr_size: usize,
max_size: Option<usize>, max_size: Option<usize>,
should_display_default_types: bool,
} }
pub trait HirDisplay { pub trait HirDisplay {
@ -19,7 +20,7 @@ pub trait HirDisplay {
where where
Self: Sized, Self: Sized,
{ {
HirDisplayWrapper(db, self, None) HirDisplayWrapper(db, self, None, true)
} }
fn display_truncated<'a, DB>( fn display_truncated<'a, DB>(
@ -30,7 +31,7 @@ pub trait HirDisplay {
where where
Self: Sized, Self: Sized,
{ {
HirDisplayWrapper(db, self, max_size) HirDisplayWrapper(db, self, max_size, false)
} }
} }
@ -72,9 +73,13 @@ where
false false
} }
} }
pub fn should_display_default_types(&self) -> bool {
self.should_display_default_types
}
} }
pub struct HirDisplayWrapper<'a, DB, T>(&'a DB, &'a T, Option<usize>); pub struct HirDisplayWrapper<'a, DB, T>(&'a DB, &'a T, Option<usize>, bool);
impl<'a, DB, T> fmt::Display for HirDisplayWrapper<'a, DB, T> impl<'a, DB, T> fmt::Display for HirDisplayWrapper<'a, DB, T>
where where
@ -88,6 +93,7 @@ where
buf: String::with_capacity(20), buf: String::with_capacity(20),
curr_size: 0, curr_size: 0,
max_size: self.2, max_size: self.2,
should_display_default_types: self.3,
}) })
} }
} }

View file

@ -3,7 +3,7 @@
use std::sync::Arc; use std::sync::Arc;
use hir_def::{ use hir_def::{
path::{known, Path}, path::{path, Path},
resolver::HasResolver, resolver::HasResolver,
AdtId, FunctionId, AdtId, FunctionId,
}; };
@ -97,7 +97,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
let (_, source_map) = db.body_with_source_map(self.func.into()); let (_, source_map) = db.body_with_source_map(self.func.into());
if let Some(source_ptr) = source_map.expr_syntax(id) { if let Some(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.a() { if let Some(expr) = source_ptr.value.left() {
let root = source_ptr.file_syntax(db); let root = source_ptr.file_syntax(db);
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) { if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
if let Some(field_list) = record_lit.record_field_list() { if let Some(field_list) = record_lit.record_field_list() {
@ -124,7 +124,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
None => return, None => return,
}; };
let std_result_path = known::std_result_result(); let std_result_path = path![std::result::Result];
let resolver = self.func.resolver(db); let resolver = self.func.resolver(db);
let std_result_enum = match resolver.resolve_known_enum(db, &std_result_path) { let std_result_enum = match resolver.resolve_known_enum(db, &std_result_path) {
@ -142,7 +142,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
let (_, source_map) = db.body_with_source_map(self.func.into()); let (_, source_map) = db.body_with_source_map(self.func.into());
if let Some(source_ptr) = source_map.expr_syntax(id) { if let Some(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.a() { if let Some(expr) = source_ptr.value.left() {
self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr }); self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
} }
} }

View file

@ -18,19 +18,18 @@ use std::mem;
use std::ops::Index; use std::ops::Index;
use std::sync::Arc; use std::sync::Arc;
use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use hir_def::{ use hir_def::{
body::Body, body::Body,
data::{ConstData, FunctionData}, data::{ConstData, FunctionData},
expr::{BindingAnnotation, ExprId, PatId}, expr::{BindingAnnotation, ExprId, PatId},
path::{known, Path}, path::{path, Path},
resolver::{HasResolver, Resolver, TypeNs}, resolver::{HasResolver, Resolver, TypeNs},
type_ref::{Mutability, TypeRef}, type_ref::{Mutability, TypeRef},
AdtId, AssocItemId, DefWithBodyId, FunctionId, StructFieldId, TypeAliasId, VariantId, AdtId, AssocItemId, DefWithBodyId, FunctionId, StructFieldId, TypeAliasId, VariantId,
}; };
use hir_expand::{diagnostics::DiagnosticSink, name}; use hir_expand::{diagnostics::DiagnosticSink, name::name};
use ra_arena::map::ArenaMap; use ra_arena::map::ArenaMap;
use ra_prof::profile; use ra_prof::profile;
use test_utils::tested_by; use test_utils::tested_by;
@ -43,6 +42,8 @@ use super::{
}; };
use crate::{db::HirDatabase, infer::diagnostics::InferenceDiagnostic}; use crate::{db::HirDatabase, infer::diagnostics::InferenceDiagnostic};
pub(crate) use unify::unify;
macro_rules! ty_app { macro_rules! ty_app {
($ctor:pat, $param:pat) => { ($ctor:pat, $param:pat) => {
crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param }) crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param })
@ -191,11 +192,16 @@ struct InferenceContext<'a, D: HirDatabase> {
owner: DefWithBodyId, owner: DefWithBodyId,
body: Arc<Body>, body: Arc<Body>,
resolver: Resolver, resolver: Resolver,
var_unification_table: InPlaceUnificationTable<TypeVarId>, table: unify::InferenceTable,
trait_env: Arc<TraitEnvironment>, trait_env: Arc<TraitEnvironment>,
obligations: Vec<Obligation>, obligations: Vec<Obligation>,
result: InferenceResult, result: InferenceResult,
/// The return type of the function being inferred. /// The return type of the function being inferred, or the closure if we're
/// currently within one.
///
/// We might consider using a nested inference context for checking
/// closures, but currently this is the only field that will change there,
/// so it doesn't make sense.
return_ty: Ty, return_ty: Ty,
/// Impls of `CoerceUnsized` used in coercion. /// Impls of `CoerceUnsized` used in coercion.
@ -209,7 +215,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn new(db: &'a D, owner: DefWithBodyId, resolver: Resolver) -> Self { fn new(db: &'a D, owner: DefWithBodyId, resolver: Resolver) -> Self {
InferenceContext { InferenceContext {
result: InferenceResult::default(), result: InferenceResult::default(),
var_unification_table: InPlaceUnificationTable::new(), table: unify::InferenceTable::new(),
obligations: Vec::default(), obligations: Vec::default(),
return_ty: Ty::Unknown, // set in collect_fn_signature return_ty: Ty::Unknown, // set in collect_fn_signature
trait_env: TraitEnvironment::lower(db, &resolver), trait_env: TraitEnvironment::lower(db, &resolver),
@ -224,13 +230,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn resolve_all(mut self) -> InferenceResult { fn resolve_all(mut self) -> InferenceResult {
// FIXME resolve obligations as well (use Guidance if necessary) // FIXME resolve obligations as well (use Guidance if necessary)
let mut result = mem::replace(&mut self.result, InferenceResult::default()); let mut result = mem::replace(&mut self.result, InferenceResult::default());
let mut tv_stack = Vec::new();
for ty in result.type_of_expr.values_mut() { for ty in result.type_of_expr.values_mut() {
let resolved = self.resolve_ty_completely(&mut tv_stack, mem::replace(ty, Ty::Unknown)); let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
*ty = resolved; *ty = resolved;
} }
for ty in result.type_of_pat.values_mut() { for ty in result.type_of_pat.values_mut() {
let resolved = self.resolve_ty_completely(&mut tv_stack, mem::replace(ty, Ty::Unknown)); let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
*ty = resolved; *ty = resolved;
} }
result result
@ -275,96 +280,38 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.normalize_associated_types_in(ty) self.normalize_associated_types_in(ty)
} }
fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool { /// Replaces `impl Trait` in `ty` by type variables and obligations for
substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth)) /// those variables. This is done for function arguments when calling a
} /// function, and for return types when inside the function body, i.e. in
/// the cases where the `impl Trait` is 'transparent'. In other cases, `impl
fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool { /// Trait` is represented by `Ty::Opaque`.
self.unify_inner(ty1, ty2, 0) fn insert_vars_for_impl_trait(&mut self, ty: Ty) -> Ty {
} ty.fold(&mut |ty| match ty {
Ty::Opaque(preds) => {
fn unify_inner(&mut self, ty1: &Ty, ty2: &Ty, depth: usize) -> bool { tested_by!(insert_vars_for_impl_trait);
if depth > 1000 { let var = self.table.new_type_var();
// prevent stackoverflows let var_subst = Substs::builder(1).push(var.clone()).build();
panic!("infinite recursion in unification"); self.obligations.extend(
} preds
if ty1 == ty2 { .iter()
return true; .map(|pred| pred.clone().subst_bound_vars(&var_subst))
} .filter_map(Obligation::from_predicate),
// try to resolve type vars first );
let ty1 = self.resolve_ty_shallow(ty1); var
let ty2 = self.resolve_ty_shallow(ty2);
match (&*ty1, &*ty2) {
(Ty::Apply(a_ty1), Ty::Apply(a_ty2)) if a_ty1.ctor == a_ty2.ctor => {
self.unify_substs(&a_ty1.parameters, &a_ty2.parameters, depth + 1)
} }
_ => self.unify_inner_trivial(&ty1, &ty2), _ => ty,
} })
}
fn unify_inner_trivial(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
match (ty1, ty2) {
(Ty::Unknown, _) | (_, Ty::Unknown) => true,
(Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
| (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
| (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2)))
| (
Ty::Infer(InferTy::MaybeNeverTypeVar(tv1)),
Ty::Infer(InferTy::MaybeNeverTypeVar(tv2)),
) => {
// both type vars are unknown since we tried to resolve them
self.var_unification_table.union(*tv1, *tv2);
true
}
// The order of MaybeNeverTypeVar matters here.
// Unifying MaybeNeverTypeVar and TypeVar will let the latter become MaybeNeverTypeVar.
// Unifying MaybeNeverTypeVar and other concrete type will let the former become it.
(Ty::Infer(InferTy::TypeVar(tv)), other)
| (other, Ty::Infer(InferTy::TypeVar(tv)))
| (Ty::Infer(InferTy::MaybeNeverTypeVar(tv)), other)
| (other, Ty::Infer(InferTy::MaybeNeverTypeVar(tv)))
| (Ty::Infer(InferTy::IntVar(tv)), other @ ty_app!(TypeCtor::Int(_)))
| (other @ ty_app!(TypeCtor::Int(_)), Ty::Infer(InferTy::IntVar(tv)))
| (Ty::Infer(InferTy::FloatVar(tv)), other @ ty_app!(TypeCtor::Float(_)))
| (other @ ty_app!(TypeCtor::Float(_)), Ty::Infer(InferTy::FloatVar(tv))) => {
// the type var is unknown since we tried to resolve it
self.var_unification_table.union_value(*tv, TypeVarValue::Known(other.clone()));
true
}
_ => false,
}
}
fn new_type_var(&mut self) -> Ty {
Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
fn new_integer_var(&mut self) -> Ty {
Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
fn new_float_var(&mut self) -> Ty {
Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
fn new_maybe_never_type_var(&mut self) -> Ty {
Ty::Infer(InferTy::MaybeNeverTypeVar(
self.var_unification_table.new_key(TypeVarValue::Unknown),
))
} }
/// Replaces Ty::Unknown by a new type var, so we can maybe still infer it. /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it.
fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty { fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
match ty { match ty {
Ty::Unknown => self.new_type_var(), Ty::Unknown => self.table.new_type_var(),
Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(Uncertain::Unknown), .. }) => { Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(Uncertain::Unknown), .. }) => {
self.new_integer_var() self.table.new_integer_var()
} }
Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(Uncertain::Unknown), .. }) => { Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(Uncertain::Unknown), .. }) => {
self.new_float_var() self.table.new_float_var()
} }
_ => ty, _ => ty,
} }
@ -402,64 +349,52 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
} }
fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
self.table.unify(ty1, ty2)
}
/// Resolves the type as far as currently possible, replacing type variables /// Resolves the type as far as currently possible, replacing type variables
/// by their known types. All types returned by the infer_* functions should /// by their known types. All types returned by the infer_* functions should
/// be resolved as far as possible, i.e. contain no type variables with /// be resolved as far as possible, i.e. contain no type variables with
/// known type. /// known type.
fn resolve_ty_as_possible(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty { fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
self.resolve_obligations_as_possible(); self.resolve_obligations_as_possible();
ty.fold(&mut |ty| match ty { self.table.resolve_ty_as_possible(ty)
Ty::Infer(tv) => {
let inner = tv.to_inner();
if tv_stack.contains(&inner) {
tested_by!(type_var_cycles_resolve_as_possible);
// recursive type
return tv.fallback_value();
}
if let Some(known_ty) =
self.var_unification_table.inlined_probe_value(inner).known()
{
// known_ty may contain other variables that are known by now
tv_stack.push(inner);
let result = self.resolve_ty_as_possible(tv_stack, known_ty.clone());
tv_stack.pop();
result
} else {
ty
}
}
_ => ty,
})
} }
/// If `ty` is a type variable with known type, returns that type;
/// otherwise, return ty.
fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> { fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
let mut ty = Cow::Borrowed(ty); self.table.resolve_ty_shallow(ty)
// The type variable could resolve to a int/float variable. Hence try }
// resolving up to three times; each type of variable shouldn't occur
// more than once fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option<TypeAliasId>) -> Ty {
for i in 0..3 { self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
if i > 0 { }
tested_by!(type_var_resolves_to_int_var);
} fn resolve_associated_type_with_params(
match &*ty { &mut self,
Ty::Infer(tv) => { inner_ty: Ty,
let inner = tv.to_inner(); assoc_ty: Option<TypeAliasId>,
match self.var_unification_table.inlined_probe_value(inner).known() { params: &[Ty],
Some(known_ty) => { ) -> Ty {
// The known_ty can't be a type var itself match assoc_ty {
ty = Cow::Owned(known_ty.clone()); Some(res_assoc_ty) => {
} let ty = self.table.new_type_var();
_ => return ty, let builder = Substs::build_for_def(self.db, res_assoc_ty)
} .push(inner_ty)
} .fill(params.iter().cloned());
_ => return ty, let projection = ProjectionPredicate {
ty: ty.clone(),
projection_ty: ProjectionTy {
associated_ty: res_assoc_ty,
parameters: builder.build(),
},
};
self.obligations.push(Obligation::Projection(projection));
self.resolve_ty_as_possible(ty)
} }
None => Ty::Unknown,
} }
log::error!("Inference variable still not resolved: {:?}", ty);
ty
} }
/// Recurses through the given type, normalizing associated types mentioned /// Recurses through the given type, normalizing associated types mentioned
@ -469,7 +404,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
/// call). `make_ty` handles this already, but e.g. for field types we need /// call). `make_ty` handles this already, but e.g. for field types we need
/// to do it as well. /// to do it as well.
fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty { fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
let ty = self.resolve_ty_as_possible(&mut vec![], ty); let ty = self.resolve_ty_as_possible(ty);
ty.fold(&mut |ty| match ty { ty.fold(&mut |ty| match ty {
Ty::Projection(proj_ty) => self.normalize_projection_ty(proj_ty), Ty::Projection(proj_ty) => self.normalize_projection_ty(proj_ty),
_ => ty, _ => ty,
@ -477,40 +412,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty { fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
let var = self.new_type_var(); let var = self.table.new_type_var();
let predicate = ProjectionPredicate { projection_ty: proj_ty, ty: var.clone() }; let predicate = ProjectionPredicate { projection_ty: proj_ty, ty: var.clone() };
let obligation = Obligation::Projection(predicate); let obligation = Obligation::Projection(predicate);
self.obligations.push(obligation); self.obligations.push(obligation);
var var
} }
/// Resolves the type completely; type variables without known type are
/// replaced by Ty::Unknown.
fn resolve_ty_completely(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
ty.fold(&mut |ty| match ty {
Ty::Infer(tv) => {
let inner = tv.to_inner();
if tv_stack.contains(&inner) {
tested_by!(type_var_cycles_resolve_completely);
// recursive type
return tv.fallback_value();
}
if let Some(known_ty) =
self.var_unification_table.inlined_probe_value(inner).known()
{
// known_ty may contain other variables that are known by now
tv_stack.push(inner);
let result = self.resolve_ty_completely(tv_stack, known_ty.clone());
tv_stack.pop();
result
} else {
tv.fallback_value()
}
}
_ => ty,
})
}
fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantId>) { fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantId>) {
let path = match path { let path = match path {
Some(path) => path, Some(path) => path,
@ -519,7 +427,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let resolver = &self.resolver; let resolver = &self.resolver;
// FIXME: this should resolve assoc items as well, see this example: // FIXME: this should resolve assoc items as well, see this example:
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521 // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
match resolver.resolve_path_in_type_ns_fully(self.db, &path) { match resolver.resolve_path_in_type_ns_fully(self.db, path.mod_path()) {
Some(TypeNs::AdtId(AdtId::StructId(strukt))) => { Some(TypeNs::AdtId(AdtId::StructId(strukt))) => {
let substs = Ty::substs_from_path(self.db, resolver, path, strukt.into()); let substs = Ty::substs_from_path(self.db, resolver, path, strukt.into());
let ty = self.db.ty(strukt.into()); let ty = self.db.ty(strukt.into());
@ -547,93 +455,90 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.infer_pat(*pat, &ty, BindingMode::default()); self.infer_pat(*pat, &ty, BindingMode::default());
} }
self.return_ty = self.make_ty(&data.ret_type); let return_ty = self.make_ty(&data.ret_type);
self.return_ty = self.insert_vars_for_impl_trait(return_ty);
} }
fn infer_body(&mut self) { fn infer_body(&mut self) {
self.infer_expr(self.body.body_expr, &Expectation::has_type(self.return_ty.clone())); self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
} }
fn resolve_into_iter_item(&self) -> Option<TypeAliasId> { fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
let path = known::std_iter_into_iterator(); let path = path![std::iter::IntoIterator];
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?; let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name::ITEM_TYPE) self.db.trait_data(trait_).associated_type_by_name(&name![Item])
} }
fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> { fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
let path = known::std_ops_try(); let path = path![std::ops::Try];
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?; let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name::OK_TYPE) self.db.trait_data(trait_).associated_type_by_name(&name![Ok])
}
fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
let path = path![std::ops::Neg];
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name![Output])
}
fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
let path = path![std::ops::Not];
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name![Output])
} }
fn resolve_future_future_output(&self) -> Option<TypeAliasId> { fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
let path = known::std_future_future(); let path = path![std::future::Future];
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?; let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name::OUTPUT_TYPE) self.db.trait_data(trait_).associated_type_by_name(&name![Output])
} }
fn resolve_boxed_box(&self) -> Option<AdtId> { fn resolve_boxed_box(&self) -> Option<AdtId> {
let path = known::std_boxed_box(); let path = path![std::boxed::Box];
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?; let struct_ = self.resolver.resolve_known_struct(self.db, &path)?;
Some(struct_.into()) Some(struct_.into())
} }
}
/// The ID of a type variable. fn resolve_range_full(&self) -> Option<AdtId> {
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] let path = path![std::ops::RangeFull];
pub struct TypeVarId(pub(super) u32); let struct_ = self.resolver.resolve_known_struct(self.db, &path)?;
Some(struct_.into())
impl UnifyKey for TypeVarId {
type Value = TypeVarValue;
fn index(&self) -> u32 {
self.0
} }
fn from_index(i: u32) -> Self { fn resolve_range(&self) -> Option<AdtId> {
TypeVarId(i) let path = path![std::ops::Range];
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?;
Some(struct_.into())
} }
fn tag() -> &'static str { fn resolve_range_inclusive(&self) -> Option<AdtId> {
"TypeVarId" let path = path![std::ops::RangeInclusive];
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?;
Some(struct_.into())
} }
}
/// The value of a type variable: either we already know the type, or we don't fn resolve_range_from(&self) -> Option<AdtId> {
/// know it yet. let path = path![std::ops::RangeFrom];
#[derive(Clone, PartialEq, Eq, Debug)] let struct_ = self.resolver.resolve_known_struct(self.db, &path)?;
pub enum TypeVarValue { Some(struct_.into())
Known(Ty),
Unknown,
}
impl TypeVarValue {
fn known(&self) -> Option<&Ty> {
match self {
TypeVarValue::Known(ty) => Some(ty),
TypeVarValue::Unknown => None,
}
} }
}
impl UnifyValue for TypeVarValue { fn resolve_range_to(&self) -> Option<AdtId> {
type Error = NoError; let path = path![std::ops::RangeTo];
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?;
Some(struct_.into())
}
fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> { fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
match (value1, value2) { let path = path![std::ops::RangeToInclusive];
// We should never equate two type variables, both of which have let struct_ = self.resolver.resolve_known_struct(self.db, &path)?;
// known types. Instead, we recursively equate those types. Some(struct_.into())
(TypeVarValue::Known(t1), TypeVarValue::Known(t2)) => panic!( }
"equating two type variables, both of which have known types: {:?} and {:?}",
t1, t2
),
// If one side is known, prefer that one. fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
(TypeVarValue::Known(..), TypeVarValue::Unknown) => Ok(value1.clone()), let path = path![std::ops::Index];
(TypeVarValue::Unknown, TypeVarValue::Known(..)) => Ok(value2.clone()), let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name![Output])
(TypeVarValue::Unknown, TypeVarValue::Unknown) => Ok(TypeVarValue::Unknown),
}
} }
} }
@ -643,14 +548,14 @@ impl UnifyValue for TypeVarValue {
/// several integer types). /// several integer types).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum InferTy { pub enum InferTy {
TypeVar(TypeVarId), TypeVar(unify::TypeVarId),
IntVar(TypeVarId), IntVar(unify::TypeVarId),
FloatVar(TypeVarId), FloatVar(unify::TypeVarId),
MaybeNeverTypeVar(TypeVarId), MaybeNeverTypeVar(unify::TypeVarId),
} }
impl InferTy { impl InferTy {
fn to_inner(self) -> TypeVarId { fn to_inner(self) -> unify::TypeVarId {
match self { match self {
InferTy::TypeVar(ty) InferTy::TypeVar(ty)
| InferTy::IntVar(ty) | InferTy::IntVar(ty)
@ -693,7 +598,7 @@ impl Expectation {
} }
mod diagnostics { mod diagnostics {
use hir_def::{expr::ExprId, FunctionId, HasSource, Lookup}; use hir_def::{expr::ExprId, src::HasSource, FunctionId, Lookup};
use hir_expand::diagnostics::DiagnosticSink; use hir_expand::diagnostics::DiagnosticSink;
use crate::{db::HirDatabase, diagnostics::NoSuchField}; use crate::{db::HirDatabase, diagnostics::NoSuchField};

View file

@ -8,9 +8,9 @@ use hir_def::{lang_item::LangItemTarget, resolver::Resolver, type_ref::Mutabilit
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use test_utils::tested_by; use test_utils::tested_by;
use crate::{autoderef, db::HirDatabase, ImplTy, Substs, Ty, TypeCtor, TypeWalk}; use crate::{autoderef, db::HirDatabase, Substs, Ty, TypeCtor, TypeWalk};
use super::{InEnvironment, InferTy, InferenceContext, TypeVarValue}; use super::{unify::TypeVarValue, InEnvironment, InferTy, InferenceContext};
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a, D: HirDatabase> InferenceContext<'a, D> {
/// Unify two types, but may coerce the first one to the second one /// Unify two types, but may coerce the first one to the second one
@ -54,10 +54,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
impls impls
.iter() .iter()
.filter_map(|&impl_id| { .filter_map(|&impl_id| {
let trait_ref = match db.impl_ty(impl_id) { let trait_ref = db.impl_trait(impl_id)?;
ImplTy::TraitRef(it) => it,
ImplTy::Inherent(_) => return None,
};
// `CoerseUnsized` has one generic parameter for the target type. // `CoerseUnsized` has one generic parameter for the target type.
let cur_from_ty = trait_ref.substs.0.get(0)?; let cur_from_ty = trait_ref.substs.0.get(0)?;
@ -88,8 +85,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
match (&from_ty, to_ty) { match (&from_ty, to_ty) {
// Never type will make type variable to fallback to Never Type instead of Unknown. // Never type will make type variable to fallback to Never Type instead of Unknown.
(ty_app!(TypeCtor::Never), Ty::Infer(InferTy::TypeVar(tv))) => { (ty_app!(TypeCtor::Never), Ty::Infer(InferTy::TypeVar(tv))) => {
let var = self.new_maybe_never_type_var(); let var = self.table.new_maybe_never_type_var();
self.var_unification_table.union_value(*tv, TypeVarValue::Known(var)); self.table.var_unification_table.union_value(*tv, TypeVarValue::Known(var));
return true; return true;
} }
(ty_app!(TypeCtor::Never), _) => return true, (ty_app!(TypeCtor::Never), _) => return true,
@ -97,7 +94,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// Trivial cases, this should go after `never` check to // Trivial cases, this should go after `never` check to
// avoid infer result type to be never // avoid infer result type to be never
_ => { _ => {
if self.unify_inner_trivial(&from_ty, &to_ty) { if self.table.unify_inner_trivial(&from_ty, &to_ty) {
return true; return true;
} }
} }
@ -137,6 +134,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
} }
(ty_app!(TypeCtor::Closure { .. }, params), ty_app!(TypeCtor::FnPtr { .. })) => {
from_ty = params[0].clone();
}
_ => {} _ => {}
} }
@ -333,9 +334,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// Stop when constructor matches. // Stop when constructor matches.
(ty_app!(from_ctor, st1), ty_app!(to_ctor, st2)) if from_ctor == to_ctor => { (ty_app!(from_ctor, st1), ty_app!(to_ctor, st2)) if from_ctor == to_ctor => {
// It will not recurse to `coerce`. // It will not recurse to `coerce`.
return self.unify_substs(st1, st2, 0); return self.table.unify_substs(st1, st2, 0);
}
_ => {
if self.table.unify_inner_trivial(&derefed_ty, &to_ty) {
return true;
}
} }
_ => {}
} }
} }

View file

@ -6,17 +6,21 @@ use std::sync::Arc;
use hir_def::{ use hir_def::{
builtin_type::Signedness, builtin_type::Signedness,
expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp}, expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
generics::GenericParams,
path::{GenericArg, GenericArgs}, path::{GenericArg, GenericArgs},
resolver::resolver_for_expr, resolver::resolver_for_expr,
AdtId, ContainerId, Lookup, StructFieldId, AdtId, AssocContainerId, Lookup, StructFieldId,
}; };
use hir_expand::name::{self, Name}; use hir_expand::name::{name, Name};
use ra_syntax::ast::RangeOp;
use crate::{ use crate::{
autoderef, db::HirDatabase, method_resolution, op, traits::InEnvironment, utils::variant_data, autoderef,
CallableDef, InferTy, IntTy, Mutability, Obligation, ProjectionPredicate, ProjectionTy, Substs, db::HirDatabase,
TraitRef, Ty, TypeCtor, TypeWalk, Uncertain, method_resolution, op,
traits::InEnvironment,
utils::{generics, variant_data, Generics},
ApplicationTy, CallableDef, InferTy, IntTy, Mutability, Obligation, Substs, TraitRef, Ty,
TypeCtor, TypeWalk, Uncertain,
}; };
use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch}; use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch};
@ -31,13 +35,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }, TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() },
); );
} }
let ty = self.resolve_ty_as_possible(&mut vec![], ty); let ty = self.resolve_ty_as_possible(ty);
ty ty
} }
/// Infer type of expression with possibly implicit coerce to the expected type. /// Infer type of expression with possibly implicit coerce to the expected type.
/// Return the type after possible coercion. /// Return the type after possible coercion.
fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty { pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
let ty = self.infer_expr_inner(expr, &expected); let ty = self.infer_expr_inner(expr, &expected);
let ty = if !self.coerce(&ty, &expected.ty) { let ty = if !self.coerce(&ty, &expected.ty) {
self.result self.result
@ -52,7 +56,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
expected.ty.clone() expected.ty.clone()
}; };
self.resolve_ty_as_possible(&mut vec![], ty) self.resolve_ty_as_possible(ty)
} }
fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
@ -91,27 +95,14 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Expr::For { iterable, body, pat } => { Expr::For { iterable, body, pat } => {
let iterable_ty = self.infer_expr(*iterable, &Expectation::none()); let iterable_ty = self.infer_expr(*iterable, &Expectation::none());
let pat_ty = match self.resolve_into_iter_item() { let pat_ty =
Some(into_iter_item_alias) => { self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
let pat_ty = self.new_type_var();
let projection = ProjectionPredicate {
ty: pat_ty.clone(),
projection_ty: ProjectionTy {
associated_ty: into_iter_item_alias,
parameters: Substs::single(iterable_ty),
},
};
self.obligations.push(Obligation::Projection(projection));
self.resolve_ty_as_possible(&mut vec![], pat_ty)
}
None => Ty::Unknown,
};
self.infer_pat(*pat, &pat_ty, BindingMode::default()); self.infer_pat(*pat, &pat_ty, BindingMode::default());
self.infer_expr(*body, &Expectation::has_type(Ty::unit())); self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
Ty::unit() Ty::unit()
} }
Expr::Lambda { body, args, arg_types } => { Expr::Lambda { body, args, ret_type, arg_types } => {
assert_eq!(args.len(), arg_types.len()); assert_eq!(args.len(), arg_types.len());
let mut sig_tys = Vec::new(); let mut sig_tys = Vec::new();
@ -127,7 +118,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
// add return type // add return type
let ret_ty = self.new_type_var(); let ret_ty = match ret_type {
Some(type_ref) => self.make_ty(type_ref),
None => self.table.new_type_var(),
};
sig_tys.push(ret_ty.clone()); sig_tys.push(ret_ty.clone());
let sig_ty = Ty::apply( let sig_ty = Ty::apply(
TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1 }, TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1 },
@ -143,7 +137,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// infer the body. // infer the body.
self.coerce(&closure_ty, &expected.ty); self.coerce(&closure_ty, &expected.ty);
self.infer_expr(*body, &Expectation::has_type(ret_ty)); let prev_ret_ty = std::mem::replace(&mut self.return_ty, ret_ty.clone());
self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
self.return_ty = prev_ret_ty;
closure_ty closure_ty
} }
Expr::Call { callee, args } => { Expr::Call { callee, args } => {
@ -166,7 +165,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Expr::Match { expr, arms } => { Expr::Match { expr, arms } => {
let input_ty = self.infer_expr(*expr, &Expectation::none()); let input_ty = self.infer_expr(*expr, &Expectation::none());
let mut result_ty = self.new_maybe_never_type_var(); let mut result_ty = self.table.new_maybe_never_type_var();
for arm in arms { for arm in arms {
for &pat in &arm.pats { for &pat in &arm.pats {
@ -200,7 +199,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
Expr::Return { expr } => { Expr::Return { expr } => {
if let Some(expr) = expr { if let Some(expr) = expr {
self.infer_expr(*expr, &Expectation::has_type(self.return_ty.clone())); self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
} else {
let unit = Ty::unit();
self.coerce(&unit, &self.return_ty.clone());
} }
Ty::simple(TypeCtor::Never) Ty::simple(TypeCtor::Never)
} }
@ -244,7 +246,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
ty ty
} }
Expr::Field { expr, name } => { Expr::Field { expr, name } => {
let receiver_ty = self.infer_expr(*expr, &Expectation::none()); let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty); let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty);
let ty = autoderef::autoderef( let ty = autoderef::autoderef(
self.db, self.db,
@ -279,45 +281,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.normalize_associated_types_in(ty) self.normalize_associated_types_in(ty)
} }
Expr::Await { expr } => { Expr::Await { expr } => {
let inner_ty = self.infer_expr(*expr, &Expectation::none()); let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
let ty = match self.resolve_future_future_output() { let ty =
Some(future_future_output_alias) => { self.resolve_associated_type(inner_ty, self.resolve_future_future_output());
let ty = self.new_type_var();
let projection = ProjectionPredicate {
ty: ty.clone(),
projection_ty: ProjectionTy {
associated_ty: future_future_output_alias,
parameters: Substs::single(inner_ty),
},
};
self.obligations.push(Obligation::Projection(projection));
self.resolve_ty_as_possible(&mut vec![], ty)
}
None => Ty::Unknown,
};
ty ty
} }
Expr::Try { expr } => { Expr::Try { expr } => {
let inner_ty = self.infer_expr(*expr, &Expectation::none()); let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
let ty = match self.resolve_ops_try_ok() { let ty = self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok());
Some(ops_try_ok_alias) => {
let ty = self.new_type_var();
let projection = ProjectionPredicate {
ty: ty.clone(),
projection_ty: ProjectionTy {
associated_ty: ops_try_ok_alias,
parameters: Substs::single(inner_ty),
},
};
self.obligations.push(Obligation::Projection(projection));
self.resolve_ty_as_possible(&mut vec![], ty)
}
None => Ty::Unknown,
};
ty ty
} }
Expr::Cast { expr, type_ref } => { Expr::Cast { expr, type_ref } => {
let _inner_ty = self.infer_expr(*expr, &Expectation::none()); let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
let cast_ty = self.make_ty(type_ref); let cast_ty = self.make_ty(type_ref);
// FIXME check the cast... // FIXME check the cast...
cast_ty cast_ty
@ -333,12 +308,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} else { } else {
Expectation::none() Expectation::none()
}; };
// FIXME reference coercions etc. let inner_ty = self.infer_expr_inner(*expr, &expectation);
let inner_ty = self.infer_expr(*expr, &expectation);
Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty) Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty)
} }
Expr::Box { expr } => { Expr::Box { expr } => {
let inner_ty = self.infer_expr(*expr, &Expectation::none()); let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
if let Some(box_) = self.resolve_boxed_box() { if let Some(box_) = self.resolve_boxed_box() {
Ty::apply_one(TypeCtor::Adt(box_), inner_ty) Ty::apply_one(TypeCtor::Adt(box_), inner_ty)
} else { } else {
@ -346,7 +320,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
} }
Expr::UnaryOp { expr, op } => { Expr::UnaryOp { expr, op } => {
let inner_ty = self.infer_expr(*expr, &Expectation::none()); let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
match op { match op {
UnaryOp::Deref => match self.resolver.krate() { UnaryOp::Deref => match self.resolver.krate() {
Some(krate) => { Some(krate) => {
@ -369,31 +343,36 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}, },
UnaryOp::Neg => { UnaryOp::Neg => {
match &inner_ty { match &inner_ty {
Ty::Apply(a_ty) => match a_ty.ctor { // Fast path for builtins
TypeCtor::Int(Uncertain::Unknown) Ty::Apply(ApplicationTy {
| TypeCtor::Int(Uncertain::Known(IntTy { ctor:
signedness: Signedness::Signed, TypeCtor::Int(Uncertain::Known(IntTy {
.. signedness: Signedness::Signed,
})) ..
| TypeCtor::Float(..) => inner_ty, })),
_ => Ty::Unknown, ..
}, })
Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => { | Ty::Apply(ApplicationTy {
inner_ty ctor: TypeCtor::Int(Uncertain::Unknown),
} ..
// FIXME: resolve ops::Neg trait })
_ => Ty::Unknown, | Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(_), .. })
| Ty::Infer(InferTy::IntVar(..))
| Ty::Infer(InferTy::FloatVar(..)) => inner_ty,
// Otherwise we resolve via the std::ops::Neg trait
_ => self
.resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
} }
} }
UnaryOp::Not => { UnaryOp::Not => {
match &inner_ty { match &inner_ty {
Ty::Apply(a_ty) => match a_ty.ctor { // Fast path for builtins
TypeCtor::Bool | TypeCtor::Int(_) => inner_ty, Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. })
_ => Ty::Unknown, | Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(_), .. })
}, | Ty::Infer(InferTy::IntVar(..)) => inner_ty,
Ty::Infer(InferTy::IntVar(..)) => inner_ty, // Otherwise we resolve via the std::ops::Not trait
// FIXME: resolve ops::Not trait for inner_ty _ => self
_ => Ty::Unknown, .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
} }
} }
} }
@ -415,21 +394,63 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
_ => Ty::Unknown, _ => Ty::Unknown,
}, },
Expr::Range { lhs, rhs, range_type } => {
let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
let rhs_expect = lhs_ty
.as_ref()
.map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
match (range_type, lhs_ty, rhs_ty) {
(RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
Some(adt) => Ty::simple(TypeCtor::Adt(adt)),
None => Ty::Unknown,
},
(RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
None => Ty::Unknown,
},
(RangeOp::Inclusive, None, Some(ty)) => {
match self.resolve_range_to_inclusive() {
Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
None => Ty::Unknown,
}
}
(RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
None => Ty::Unknown,
},
(RangeOp::Inclusive, Some(_), Some(ty)) => {
match self.resolve_range_inclusive() {
Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
None => Ty::Unknown,
}
}
(RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
None => Ty::Unknown,
},
(RangeOp::Inclusive, _, None) => Ty::Unknown,
}
}
Expr::Index { base, index } => { Expr::Index { base, index } => {
let _base_ty = self.infer_expr(*base, &Expectation::none()); let base_ty = self.infer_expr_inner(*base, &Expectation::none());
let _index_ty = self.infer_expr(*index, &Expectation::none()); let index_ty = self.infer_expr(*index, &Expectation::none());
// FIXME: use `std::ops::Index::Output` to figure out the real return type
Ty::Unknown self.resolve_associated_type_with_params(
base_ty,
self.resolve_ops_index_output(),
&[index_ty],
)
} }
Expr::Tuple { exprs } => { Expr::Tuple { exprs } => {
let mut tys = match &expected.ty { let mut tys = match &expected.ty {
ty_app!(TypeCtor::Tuple { .. }, st) => st ty_app!(TypeCtor::Tuple { .. }, st) => st
.iter() .iter()
.cloned() .cloned()
.chain(repeat_with(|| self.new_type_var())) .chain(repeat_with(|| self.table.new_type_var()))
.take(exprs.len()) .take(exprs.len())
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
_ => (0..exprs.len()).map(|_| self.new_type_var()).collect(), _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
}; };
for (expr, ty) in exprs.iter().zip(tys.iter_mut()) { for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
@ -443,7 +464,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => { ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => {
st.as_single().clone() st.as_single().clone()
} }
_ => self.new_type_var(), _ => self.table.new_type_var(),
}; };
match array { match array {
@ -485,7 +506,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}; };
// use a new type variable if we got Ty::Unknown here // use a new type variable if we got Ty::Unknown here
let ty = self.insert_type_vars_shallow(ty); let ty = self.insert_type_vars_shallow(ty);
let ty = self.resolve_ty_as_possible(&mut vec![], ty); let ty = self.resolve_ty_as_possible(ty);
self.write_expr_ty(tgt_expr, ty.clone()); self.write_expr_ty(tgt_expr, ty.clone());
ty ty
} }
@ -514,7 +535,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
} }
let ty = self.resolve_ty_as_possible(&mut vec![], ty); let ty = self.resolve_ty_as_possible(ty);
self.infer_pat(*pat, &ty, BindingMode::default()); self.infer_pat(*pat, &ty, BindingMode::default());
} }
Statement::Expr(expr) => { Statement::Expr(expr) => {
@ -558,7 +579,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Some((ty, func)) => { Some((ty, func)) => {
let ty = canonicalized_receiver.decanonicalize_ty(ty); let ty = canonicalized_receiver.decanonicalize_ty(ty);
self.write_method_resolution(tgt_expr, func); self.write_method_resolution(tgt_expr, func);
(ty, self.db.value_ty(func.into()), Some(self.db.generic_params(func.into()))) (ty, self.db.value_ty(func.into()), Some(generics(self.db, func.into())))
} }
None => (receiver_ty, Ty::Unknown, None), None => (receiver_ty, Ty::Unknown, None),
}; };
@ -607,6 +628,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
continue; continue;
} }
let param_ty = self.insert_vars_for_impl_trait(param_ty);
let param_ty = self.normalize_associated_types_in(param_ty); let param_ty = self.normalize_associated_types_in(param_ty);
self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone())); self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone()));
} }
@ -615,17 +637,17 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn substs_for_method_call( fn substs_for_method_call(
&mut self, &mut self,
def_generics: Option<Arc<GenericParams>>, def_generics: Option<Generics>,
generic_args: Option<&GenericArgs>, generic_args: Option<&GenericArgs>,
receiver_ty: &Ty, receiver_ty: &Ty,
) -> Substs { ) -> Substs {
let (parent_param_count, param_count) = let (total_len, _parent_len, child_len) =
def_generics.as_ref().map_or((0, 0), |g| (g.count_parent_params(), g.params.len())); def_generics.as_ref().map_or((0, 0, 0), |g| g.len_split());
let mut substs = Vec::with_capacity(parent_param_count + param_count); let mut substs = Vec::with_capacity(total_len);
// Parent arguments are unknown, except for the receiver type // Parent arguments are unknown, except for the receiver type
if let Some(parent_generics) = def_generics.and_then(|p| p.parent_params.clone()) { if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) {
for param in &parent_generics.params { for (_id, param) in parent_generics {
if param.name == name::SELF_TYPE { if param.name == name![Self] {
substs.push(receiver_ty.clone()); substs.push(receiver_ty.clone());
} else { } else {
substs.push(Ty::Unknown); substs.push(Ty::Unknown);
@ -635,7 +657,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// handle provided type arguments // handle provided type arguments
if let Some(generic_args) = generic_args { if let Some(generic_args) = generic_args {
// if args are provided, it should be all of them, but we can't rely on that // if args are provided, it should be all of them, but we can't rely on that
for arg in generic_args.args.iter().take(param_count) { for arg in generic_args.args.iter().take(child_len) {
match arg { match arg {
GenericArg::Type(type_ref) => { GenericArg::Type(type_ref) => {
let ty = self.make_ty(type_ref); let ty = self.make_ty(type_ref);
@ -645,10 +667,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
}; };
let supplied_params = substs.len(); let supplied_params = substs.len();
for _ in supplied_params..parent_param_count + param_count { for _ in supplied_params..total_len {
substs.push(Ty::Unknown); substs.push(Ty::Unknown);
} }
assert_eq!(substs.len(), parent_param_count + param_count); assert_eq!(substs.len(), total_len);
Substs(substs.into()) Substs(substs.into())
} }
@ -665,13 +687,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// add obligation for trait implementation, if this is a trait method // add obligation for trait implementation, if this is a trait method
match def { match def {
CallableDef::FunctionId(f) => { CallableDef::FunctionId(f) => {
if let ContainerId::TraitId(trait_) = f.lookup(self.db).container { if let AssocContainerId::TraitId(trait_) = f.lookup(self.db).container {
// construct a TraitDef // construct a TraitDef
let substs = a_ty.parameters.prefix( let substs =
self.db a_ty.parameters.prefix(generics(self.db, trait_.into()).len());
.generic_params(trait_.into())
.count_params_including_parent(),
);
self.obligations.push(Obligation::Trait(TraitRef { self.obligations.push(Obligation::Trait(TraitRef {
trait_: trait_.into(), trait_: trait_.into(),
substs, substs,

View file

@ -170,7 +170,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
BindingMode::Move => inner_ty.clone(), BindingMode::Move => inner_ty.clone(),
}; };
let bound_ty = self.resolve_ty_as_possible(&mut vec![], bound_ty); let bound_ty = self.resolve_ty_as_possible(bound_ty);
self.write_pat_ty(pat, bound_ty); self.write_pat_ty(pat, bound_ty);
return inner_ty; return inner_ty;
} }
@ -179,7 +179,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// use a new type variable if we got Ty::Unknown here // use a new type variable if we got Ty::Unknown here
let ty = self.insert_type_vars_shallow(ty); let ty = self.insert_type_vars_shallow(ty);
self.unify(&ty, expected); self.unify(&ty, expected);
let ty = self.resolve_ty_as_possible(&mut vec![], ty); let ty = self.resolve_ty_as_possible(ty);
self.write_pat_ty(pat, ty.clone()); self.write_pat_ty(pat, ty.clone());
ty ty
} }

View file

@ -1,9 +1,11 @@
//! Path expression resolution. //! Path expression resolution.
use std::iter;
use hir_def::{ use hir_def::{
path::{Path, PathKind, PathSegment}, path::{Path, PathSegment},
resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs}, resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
AssocItemId, ContainerId, Lookup, AssocContainerId, AssocItemId, Lookup,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
@ -30,21 +32,21 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
path: &Path, path: &Path,
id: ExprOrPatId, id: ExprOrPatId,
) -> Option<Ty> { ) -> Option<Ty> {
let (value, self_subst) = if let PathKind::Type(type_ref) = &path.kind { let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
if path.segments.is_empty() { if path.segments().is_empty() {
// This can't actually happen syntax-wise // This can't actually happen syntax-wise
return None; return None;
} }
let ty = self.make_ty(type_ref); let ty = self.make_ty(type_ref);
let remaining_segments_for_ty = &path.segments[..path.segments.len() - 1]; let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
let ty = Ty::from_type_relative_path(self.db, resolver, ty, remaining_segments_for_ty); let ty = Ty::from_type_relative_path(self.db, resolver, ty, remaining_segments_for_ty);
self.resolve_ty_assoc_item( self.resolve_ty_assoc_item(
ty, ty,
&path.segments.last().expect("path had at least one segment").name, &path.segments().last().expect("path had at least one segment").name,
id, id,
)? )?
} else { } else {
let value_or_partial = resolver.resolve_path_in_value_ns(self.db, &path)?; let value_or_partial = resolver.resolve_path_in_value_ns(self.db, path.mod_path())?;
match value_or_partial { match value_or_partial {
ResolveValueResult::ValueNs(it) => (it, None), ResolveValueResult::ValueNs(it) => (it, None),
@ -57,7 +59,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let typable: ValueTyDefId = match value { let typable: ValueTyDefId = match value {
ValueNs::LocalBinding(pat) => { ValueNs::LocalBinding(pat) => {
let ty = self.result.type_of_pat.get(pat)?.clone(); let ty = self.result.type_of_pat.get(pat)?.clone();
let ty = self.resolve_ty_as_possible(&mut vec![], ty); let ty = self.resolve_ty_as_possible(ty);
return Some(ty); return Some(ty);
} }
ValueNs::FunctionId(it) => it.into(), ValueNs::FunctionId(it) => it.into(),
@ -83,13 +85,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
remaining_index: usize, remaining_index: usize,
id: ExprOrPatId, id: ExprOrPatId,
) -> Option<(ValueNs, Option<Substs>)> { ) -> Option<(ValueNs, Option<Substs>)> {
assert!(remaining_index < path.segments.len()); assert!(remaining_index < path.segments().len());
// there may be more intermediate segments between the resolved one and // there may be more intermediate segments between the resolved one and
// the end. Only the last segment needs to be resolved to a value; from // the end. Only the last segment needs to be resolved to a value; from
// the segments before that, we need to get either a type or a trait ref. // the segments before that, we need to get either a type or a trait ref.
let resolved_segment = &path.segments[remaining_index - 1]; let resolved_segment = path.segments().get(remaining_index - 1).unwrap();
let remaining_segments = &path.segments[remaining_index..]; let remaining_segments = path.segments().skip(remaining_index);
let is_before_last = remaining_segments.len() == 1; let is_before_last = remaining_segments.len() == 1;
match (def, is_before_last) { match (def, is_before_last) {
@ -110,7 +112,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// trait but it's not the last segment, so the next segment // trait but it's not the last segment, so the next segment
// should resolve to an associated type of that trait (e.g. `<T // should resolve to an associated type of that trait (e.g. `<T
// as Iterator>::Item::default`) // as Iterator>::Item::default`)
let remaining_segments_for_ty = &remaining_segments[..remaining_segments.len() - 1]; let remaining_segments_for_ty =
remaining_segments.take(remaining_segments.len() - 1);
let ty = Ty::from_partly_resolved_hir_path( let ty = Ty::from_partly_resolved_hir_path(
self.db, self.db,
&self.resolver, &self.resolver,
@ -136,7 +139,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn resolve_trait_assoc_item( fn resolve_trait_assoc_item(
&mut self, &mut self,
trait_ref: TraitRef, trait_ref: TraitRef,
segment: &PathSegment, segment: PathSegment<'_>,
id: ExprOrPatId, id: ExprOrPatId,
) -> Option<(ValueNs, Option<Substs>)> { ) -> Option<(ValueNs, Option<Substs>)> {
let trait_ = trait_ref.trait_; let trait_ = trait_ref.trait_;
@ -148,7 +151,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
.map(|(_name, id)| (*id).into()) .map(|(_name, id)| (*id).into())
.find_map(|item| match item { .find_map(|item| match item {
AssocItemId::FunctionId(func) => { AssocItemId::FunctionId(func) => {
if segment.name == self.db.function_data(func).name { if segment.name == &self.db.function_data(func).name {
Some(AssocItemId::FunctionId(func)) Some(AssocItemId::FunctionId(func))
} else { } else {
None None
@ -156,7 +159,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
AssocItemId::ConstId(konst) => { AssocItemId::ConstId(konst) => {
if self.db.const_data(konst).name.as_ref().map_or(false, |n| n == &segment.name) if self.db.const_data(konst).name.as_ref().map_or(false, |n| n == segment.name)
{ {
Some(AssocItemId::ConstId(konst)) Some(AssocItemId::ConstId(konst))
} else { } else {
@ -206,12 +209,23 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
AssocItemId::TypeAliasId(_) => unreachable!(), AssocItemId::TypeAliasId(_) => unreachable!(),
}; };
let substs = match container { let substs = match container {
ContainerId::ImplId(_) => self.find_self_types(&def, ty.clone()), AssocContainerId::ImplId(impl_id) => {
ContainerId::TraitId(trait_) => { let impl_substs = Substs::build_for_def(self.db, impl_id)
.fill(iter::repeat_with(|| self.table.new_type_var()))
.build();
let impl_self_ty = self.db.impl_self_ty(impl_id).subst(&impl_substs);
let substs = Substs::build_for_def(self.db, item)
.use_parent_substs(&impl_substs)
.fill_with_params()
.build();
self.unify(&impl_self_ty, &ty);
Some(substs)
}
AssocContainerId::TraitId(trait_) => {
// we're picking this method // we're picking this method
let trait_substs = Substs::build_for_def(self.db, trait_) let trait_substs = Substs::build_for_def(self.db, trait_)
.push(ty.clone()) .push(ty.clone())
.fill(std::iter::repeat_with(|| self.new_type_var())) .fill(std::iter::repeat_with(|| self.table.new_type_var()))
.build(); .build();
let substs = Substs::build_for_def(self.db, item) let substs = Substs::build_for_def(self.db, item)
.use_parent_substs(&trait_substs) .use_parent_substs(&trait_substs)
@ -223,7 +237,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
})); }));
Some(substs) Some(substs)
} }
ContainerId::ModuleId(_) => None, AssocContainerId::ContainerId(_) => None,
}; };
self.write_assoc_resolution(id, item.into()); self.write_assoc_resolution(id, item.into());
@ -231,38 +245,4 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}, },
) )
} }
fn find_self_types(&self, def: &ValueNs, actual_def_ty: Ty) -> Option<Substs> {
if let ValueNs::FunctionId(func) = *def {
// We only do the infer if parent has generic params
let gen = self.db.generic_params(func.into());
if gen.count_parent_params() == 0 {
return None;
}
let impl_id = match func.lookup(self.db).container {
ContainerId::ImplId(it) => it,
_ => return None,
};
let self_ty = self.db.impl_ty(impl_id).self_type().clone();
let self_ty_substs = self_ty.substs()?;
let actual_substs = actual_def_ty.substs()?;
let mut new_substs = vec![Ty::Unknown; gen.count_parent_params()];
// The following code *link up* the function actual parma type
// and impl_block type param index
self_ty_substs.iter().zip(actual_substs.iter()).for_each(|(param, pty)| {
if let Ty::Param { idx, .. } = param {
if let Some(s) = new_substs.get_mut(*idx as usize) {
*s = pty.clone();
}
}
});
Some(Substs(new_substs.into()))
} else {
None
}
}
} }

View file

@ -1,9 +1,15 @@
//! Unification and canonicalization logic. //! Unification and canonicalization logic.
use std::borrow::Cow;
use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
use test_utils::tested_by;
use super::{InferenceContext, Obligation}; use super::{InferenceContext, Obligation};
use crate::{ use crate::{
db::HirDatabase, utils::make_mut_slice, Canonical, InEnvironment, InferTy, ProjectionPredicate, db::HirDatabase, utils::make_mut_slice, Canonical, InEnvironment, InferTy, ProjectionPredicate,
ProjectionTy, Substs, TraitRef, Ty, TypeWalk, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk,
}; };
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a, D: HirDatabase> InferenceContext<'a, D> {
@ -24,7 +30,7 @@ where
/// A stack of type variables that is used to detect recursive types (which /// A stack of type variables that is used to detect recursive types (which
/// are an error, but we need to protect against them to avoid stack /// are an error, but we need to protect against them to avoid stack
/// overflows). /// overflows).
var_stack: Vec<super::TypeVarId>, var_stack: Vec<TypeVarId>,
} }
pub(super) struct Canonicalized<T> { pub(super) struct Canonicalized<T> {
@ -53,14 +59,14 @@ where
return tv.fallback_value(); return tv.fallback_value();
} }
if let Some(known_ty) = if let Some(known_ty) =
self.ctx.var_unification_table.inlined_probe_value(inner).known() self.ctx.table.var_unification_table.inlined_probe_value(inner).known()
{ {
self.var_stack.push(inner); self.var_stack.push(inner);
let result = self.do_canonicalize_ty(known_ty.clone()); let result = self.do_canonicalize_ty(known_ty.clone());
self.var_stack.pop(); self.var_stack.pop();
result result
} else { } else {
let root = self.ctx.var_unification_table.find(inner); let root = self.ctx.table.var_unification_table.find(inner);
let free_var = match tv { let free_var = match tv {
InferTy::TypeVar(_) => InferTy::TypeVar(root), InferTy::TypeVar(_) => InferTy::TypeVar(root),
InferTy::IntVar(_) => InferTy::IntVar(root), InferTy::IntVar(_) => InferTy::IntVar(root),
@ -153,10 +159,268 @@ impl<T> Canonicalized<T> {
solution: Canonical<Vec<Ty>>, solution: Canonical<Vec<Ty>>,
) { ) {
// the solution may contain new variables, which we need to convert to new inference vars // the solution may contain new variables, which we need to convert to new inference vars
let new_vars = Substs((0..solution.num_vars).map(|_| ctx.new_type_var()).collect()); let new_vars = Substs((0..solution.num_vars).map(|_| ctx.table.new_type_var()).collect());
for (i, ty) in solution.value.into_iter().enumerate() { for (i, ty) in solution.value.into_iter().enumerate() {
let var = self.free_vars[i]; let var = self.free_vars[i];
ctx.unify(&Ty::Infer(var), &ty.subst_bound_vars(&new_vars)); ctx.table.unify(&Ty::Infer(var), &ty.subst_bound_vars(&new_vars));
}
}
}
/// Attempts to unify two canonicalized types.
///
/// On success, returns the substitution that `ty1`'s bound variables must
/// take for the two types to be equal; on failure returns `None`. Works on
/// a throwaway `InferenceTable`, so it has no effect on any ongoing
/// inference.
pub fn unify(ty1: &Canonical<Ty>, ty2: &Canonical<Ty>) -> Option<Substs> {
    let mut table = InferenceTable::new();
    let num_vars = ty1.num_vars;
    // Instantiate each bound variable of `ty1` with a fresh inference var.
    let vars = Substs::builder(num_vars)
        .fill(std::iter::repeat_with(|| table.new_type_var()))
        .build();
    let ty1_with_vars = ty1.value.clone().subst_bound_vars(&vars);
    if table.unify(&ty1_with_vars, &ty2.value) {
        // Read back what each fresh variable ended up resolving to.
        let solution = Substs::builder(num_vars)
            .fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone())))
            .build();
        Some(solution)
    } else {
        None
    }
}
/// The core unification state: a union-find table mapping type variables
/// (`TypeVarId`) to their current `TypeVarValue` (known type or unknown).
#[derive(Clone, Debug)]
pub(crate) struct InferenceTable {
    // ena union-find table; exposed to the parent module so inference code
    // can probe/union keys directly.
    pub(super) var_unification_table: InPlaceUnificationTable<TypeVarId>,
}
impl InferenceTable {
    /// Creates an empty table with no type variables.
    pub fn new() -> Self {
        InferenceTable { var_unification_table: InPlaceUnificationTable::new() }
    }

    /// Allocates a fresh, unconstrained general type variable.
    pub fn new_type_var(&mut self) -> Ty {
        Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
    }

    /// Allocates a fresh variable that may only resolve to an integer type.
    pub fn new_integer_var(&mut self) -> Ty {
        Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
    }

    /// Allocates a fresh variable that may only resolve to a float type.
    pub fn new_float_var(&mut self) -> Ty {
        Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
    }

    /// Allocates a fresh variable that may additionally resolve to `!`
    /// (see the `MaybeNeverTypeVar` handling in `unify_inner_trivial`).
    pub fn new_maybe_never_type_var(&mut self) -> Ty {
        Ty::Infer(InferTy::MaybeNeverTypeVar(
            self.var_unification_table.new_key(TypeVarValue::Unknown),
        ))
    }

    /// Fully resolves `ty`; any variable still unknown becomes a fallback
    /// value (see `resolve_ty_completely_inner`).
    pub fn resolve_ty_completely(&mut self, ty: Ty) -> Ty {
        self.resolve_ty_completely_inner(&mut Vec::new(), ty)
    }

    /// Resolves `ty` as far as the table currently allows, leaving
    /// still-unknown variables in place (see `resolve_ty_as_possible_inner`).
    pub fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
        self.resolve_ty_as_possible_inner(&mut Vec::new(), ty)
    }

    /// Unifies two types, recording any variable bindings this implies.
    /// Returns `false` if they cannot be made equal.
    pub fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
        self.unify_inner(ty1, ty2, 0)
    }

    /// Pairwise-unifies two substitution lists at the given recursion depth.
    pub fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool {
        substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth))
    }

    /// Recursive worker for `unify`; `depth` guards against runaway
    /// recursion through deeply nested (or cyclic) type applications.
    fn unify_inner(&mut self, ty1: &Ty, ty2: &Ty, depth: usize) -> bool {
        if depth > 1000 {
            // prevent stackoverflows
            panic!("infinite recursion in unification");
        }
        if ty1 == ty2 {
            return true;
        }
        // try to resolve type vars first
        let ty1 = self.resolve_ty_shallow(ty1);
        let ty2 = self.resolve_ty_shallow(ty2);
        match (&*ty1, &*ty2) {
            // Same constructor: the types match iff their parameters do.
            (Ty::Apply(a_ty1), Ty::Apply(a_ty2)) if a_ty1.ctor == a_ty2.ctor => {
                self.unify_substs(&a_ty1.parameters, &a_ty2.parameters, depth + 1)
            }
            _ => self.unify_inner_trivial(&ty1, &ty2),
        }
    }

    /// Handles the non-structural unification cases: `Ty::Unknown`,
    /// variable-with-variable, and variable-with-concrete-type.
    pub(super) fn unify_inner_trivial(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
        match (ty1, ty2) {
            // Unknown unifies with anything without learning anything.
            (Ty::Unknown, _) | (_, Ty::Unknown) => true,
            (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
            | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
            | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2)))
            | (
                Ty::Infer(InferTy::MaybeNeverTypeVar(tv1)),
                Ty::Infer(InferTy::MaybeNeverTypeVar(tv2)),
            ) => {
                // both type vars are unknown since we tried to resolve them
                self.var_unification_table.union(*tv1, *tv2);
                true
            }
            // The order of MaybeNeverTypeVar matters here.
            // Unifying MaybeNeverTypeVar and TypeVar will let the latter become MaybeNeverTypeVar.
            // Unifying MaybeNeverTypeVar and other concrete type will let the former become it.
            (Ty::Infer(InferTy::TypeVar(tv)), other)
            | (other, Ty::Infer(InferTy::TypeVar(tv)))
            | (Ty::Infer(InferTy::MaybeNeverTypeVar(tv)), other)
            | (other, Ty::Infer(InferTy::MaybeNeverTypeVar(tv)))
            | (Ty::Infer(InferTy::IntVar(tv)), other @ ty_app!(TypeCtor::Int(_)))
            | (other @ ty_app!(TypeCtor::Int(_)), Ty::Infer(InferTy::IntVar(tv)))
            | (Ty::Infer(InferTy::FloatVar(tv)), other @ ty_app!(TypeCtor::Float(_)))
            | (other @ ty_app!(TypeCtor::Float(_)), Ty::Infer(InferTy::FloatVar(tv))) => {
                // the type var is unknown since we tried to resolve it
                self.var_unification_table.union_value(*tv, TypeVarValue::Known(other.clone()));
                true
            }
            _ => false,
        }
    }

    /// If `ty` is a type variable with known type, returns that type;
    /// otherwise, return ty.
    pub fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
        let mut ty = Cow::Borrowed(ty);
        // The type variable could resolve to a int/float variable. Hence try
        // resolving up to three times; each type of variable shouldn't occur
        // more than once
        for i in 0..3 {
            if i > 0 {
                tested_by!(type_var_resolves_to_int_var);
            }
            match &*ty {
                Ty::Infer(tv) => {
                    let inner = tv.to_inner();
                    match self.var_unification_table.inlined_probe_value(inner).known() {
                        Some(known_ty) => {
                            // The known_ty can't be a type var itself
                            ty = Cow::Owned(known_ty.clone());
                        }
                        _ => return ty,
                    }
                }
                _ => return ty,
            }
        }
        // More than three hops should be impossible; log rather than loop.
        log::error!("Inference variable still not resolved: {:?}", ty);
        ty
    }

    /// Resolves the type as far as currently possible, replacing type variables
    /// by their known types. All types returned by the infer_* functions should
    /// be resolved as far as possible, i.e. contain no type variables with
    /// known type.
    ///
    /// `tv_stack` tracks the variables currently being expanded, so that a
    /// cyclic (self-referential) variable falls back instead of recursing
    /// forever.
    fn resolve_ty_as_possible_inner(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
        ty.fold(&mut |ty| match ty {
            Ty::Infer(tv) => {
                let inner = tv.to_inner();
                if tv_stack.contains(&inner) {
                    tested_by!(type_var_cycles_resolve_as_possible);
                    // recursive type
                    return tv.fallback_value();
                }
                if let Some(known_ty) =
                    self.var_unification_table.inlined_probe_value(inner).known()
                {
                    // known_ty may contain other variables that are known by now
                    tv_stack.push(inner);
                    let result = self.resolve_ty_as_possible_inner(tv_stack, known_ty.clone());
                    tv_stack.pop();
                    result
                } else {
                    ty
                }
            }
            _ => ty,
        })
    }

    /// Resolves the type completely; type variables without known type are
    /// replaced by Ty::Unknown.
    ///
    /// Same cycle-guarding scheme as `resolve_ty_as_possible_inner`, but an
    /// unknown variable yields its fallback value rather than staying put.
    fn resolve_ty_completely_inner(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
        ty.fold(&mut |ty| match ty {
            Ty::Infer(tv) => {
                let inner = tv.to_inner();
                if tv_stack.contains(&inner) {
                    tested_by!(type_var_cycles_resolve_completely);
                    // recursive type
                    return tv.fallback_value();
                }
                if let Some(known_ty) =
                    self.var_unification_table.inlined_probe_value(inner).known()
                {
                    // known_ty may contain other variables that are known by now
                    tv_stack.push(inner);
                    let result = self.resolve_ty_completely_inner(tv_stack, known_ty.clone());
                    tv_stack.pop();
                    result
                } else {
                    tv.fallback_value()
                }
            }
            _ => ty,
        })
    }
}
/// The ID of a type variable.
///
/// A newtype over the `u32` index used as the key into the unification table.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct TypeVarId(pub(super) u32);
// Lets `TypeVarId` serve as a key in ena's `InPlaceUnificationTable`; the
// wrapped u32 is used directly as the table index.
impl UnifyKey for TypeVarId {
    type Value = TypeVarValue;

    fn index(&self) -> u32 {
        self.0
    }

    fn from_index(i: u32) -> Self {
        TypeVarId(i)
    }

    // Diagnostic tag used by ena in debug output.
    fn tag() -> &'static str {
        "TypeVarId"
    }
}
/// The value of a type variable: either we already know the type, or we don't
/// know it yet.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum TypeVarValue {
    // The variable has been resolved to this type.
    Known(Ty),
    // No information about the variable yet.
    Unknown,
}
impl TypeVarValue {
    /// Returns the resolved type if there is one, `None` while still unknown.
    fn known(&self) -> Option<&Ty> {
        if let TypeVarValue::Known(ty) = self {
            Some(ty)
        } else {
            None
        }
    }
}
impl UnifyValue for TypeVarValue {
type Error = NoError;
fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {
match (value1, value2) {
// We should never equate two type variables, both of which have
// known types. Instead, we recursively equate those types.
(TypeVarValue::Known(t1), TypeVarValue::Known(t2)) => panic!(
"equating two type variables, both of which have known types: {:?} and {:?}",
t1, t2
),
// If one side is known, prefer that one.
(TypeVarValue::Known(..), TypeVarValue::Unknown) => Ok(value1.clone()),
(TypeVarValue::Unknown, TypeVarValue::Known(..)) => Ok(value2.clone()),
(TypeVarValue::Unknown, TypeVarValue::Unknown) => Ok(TypeVarValue::Unknown),
} }
} }
} }

View file

@ -44,8 +44,8 @@ use std::sync::Arc;
use std::{fmt, iter, mem}; use std::{fmt, iter, mem};
use hir_def::{ use hir_def::{
expr::ExprId, generics::GenericParams, type_ref::Mutability, AdtId, ContainerId, DefWithBodyId, expr::ExprId, type_ref::Mutability, AdtId, AssocContainerId, DefWithBodyId, GenericDefId,
GenericDefId, HasModule, Lookup, TraitId, TypeAliasId, HasModule, Lookup, TraitId, TypeAliasId,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
use ra_db::{impl_intern_key, salsa, CrateId}; use ra_db::{impl_intern_key, salsa, CrateId};
@ -53,7 +53,7 @@ use ra_db::{impl_intern_key, salsa, CrateId};
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
primitive::{FloatTy, IntTy, Uncertain}, primitive::{FloatTy, IntTy, Uncertain},
utils::make_mut_slice, utils::{generics, make_mut_slice, Generics},
}; };
use display::{HirDisplay, HirFormatter}; use display::{HirDisplay, HirFormatter};
@ -166,16 +166,16 @@ impl TypeCtor {
| TypeCtor::Closure { .. } // 1 param representing the signature of the closure | TypeCtor::Closure { .. } // 1 param representing the signature of the closure
=> 1, => 1,
TypeCtor::Adt(adt) => { TypeCtor::Adt(adt) => {
let generic_params = db.generic_params(AdtId::from(adt).into()); let generic_params = generics(db, AdtId::from(adt).into());
generic_params.count_params_including_parent() generic_params.len()
} }
TypeCtor::FnDef(callable) => { TypeCtor::FnDef(callable) => {
let generic_params = db.generic_params(callable.into()); let generic_params = generics(db, callable.into());
generic_params.count_params_including_parent() generic_params.len()
} }
TypeCtor::AssociatedType(type_alias) => { TypeCtor::AssociatedType(type_alias) => {
let generic_params = db.generic_params(type_alias.into()); let generic_params = generics(db, type_alias.into());
generic_params.count_params_including_parent() generic_params.len()
} }
TypeCtor::FnPtr { num_args } => num_args as usize + 1, TypeCtor::FnPtr { num_args } => num_args as usize + 1,
TypeCtor::Tuple { cardinality } => cardinality as usize, TypeCtor::Tuple { cardinality } => cardinality as usize,
@ -251,7 +251,7 @@ impl ProjectionTy {
fn trait_(&self, db: &impl HirDatabase) -> TraitId { fn trait_(&self, db: &impl HirDatabase) -> TraitId {
match self.associated_ty.lookup(db).container { match self.associated_ty.lookup(db).container {
ContainerId::TraitId(it) => it, AssocContainerId::TraitId(it) => it,
_ => panic!("projection ty without parent trait"), _ => panic!("projection ty without parent trait"),
} }
} }
@ -364,36 +364,26 @@ impl Substs {
} }
/// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
pub fn identity(generic_params: &GenericParams) -> Substs { pub(crate) fn identity(generic_params: &Generics) -> Substs {
Substs( Substs(
generic_params generic_params.iter().map(|(idx, p)| Ty::Param { idx, name: p.name.clone() }).collect(),
.params_including_parent()
.into_iter()
.map(|p| Ty::Param { idx: p.idx, name: p.name.clone() })
.collect(),
) )
} }
/// Return Substs that replace each parameter by a bound variable. /// Return Substs that replace each parameter by a bound variable.
pub fn bound_vars(generic_params: &GenericParams) -> Substs { pub(crate) fn bound_vars(generic_params: &Generics) -> Substs {
Substs( Substs(generic_params.iter().map(|(idx, _p)| Ty::Bound(idx)).collect())
generic_params
.params_including_parent()
.into_iter()
.map(|p| Ty::Bound(p.idx))
.collect(),
)
} }
pub fn build_for_def(db: &impl HirDatabase, def: impl Into<GenericDefId>) -> SubstsBuilder { pub fn build_for_def(db: &impl HirDatabase, def: impl Into<GenericDefId>) -> SubstsBuilder {
let def = def.into(); let def = def.into();
let params = db.generic_params(def); let params = generics(db, def);
let param_count = params.count_params_including_parent(); let param_count = params.len();
Substs::builder(param_count) Substs::builder(param_count)
} }
pub fn build_for_generics(generic_params: &GenericParams) -> SubstsBuilder { pub(crate) fn build_for_generics(generic_params: &Generics) -> SubstsBuilder {
Substs::builder(generic_params.count_params_including_parent()) Substs::builder(generic_params.len())
} }
pub fn build_for_type_ctor(db: &impl HirDatabase, type_ctor: TypeCtor) -> SubstsBuilder { pub fn build_for_type_ctor(db: &impl HirDatabase, type_ctor: TypeCtor) -> SubstsBuilder {
@ -486,21 +476,6 @@ impl TypeWalk for TraitRef {
} }
} }
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum ImplTy {
Inherent(Ty),
TraitRef(TraitRef),
}
impl ImplTy {
pub(crate) fn self_type(&self) -> &Ty {
match self {
ImplTy::Inherent(it) => it,
ImplTy::TraitRef(tr) => &tr.substs[0],
}
}
}
/// Like `generics::WherePredicate`, but with resolved types: A condition on the /// Like `generics::WherePredicate`, but with resolved types: A condition on the
/// parameters of a generic item. /// parameters of a generic item.
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -931,13 +906,44 @@ impl HirDisplay for ApplicationTy {
write!(f, "{}", name)?; write!(f, "{}", name)?;
if self.parameters.len() > 0 { if self.parameters.len() > 0 {
write!(f, "<")?; write!(f, "<")?;
f.write_joined(&*self.parameters.0, ", ")?;
let mut non_default_parameters = Vec::with_capacity(self.parameters.len());
let parameters_to_write = if f.should_display_default_types() {
self.parameters.0.as_ref()
} else {
match self
.ctor
.as_generic_def()
.map(|generic_def_id| f.db.generic_defaults(generic_def_id))
.filter(|defaults| !defaults.is_empty())
{
Option::None => self.parameters.0.as_ref(),
Option::Some(default_parameters) => {
for (i, parameter) in self.parameters.iter().enumerate() {
match (parameter, default_parameters.get(i)) {
(&Ty::Unknown, _) | (_, None) => {
non_default_parameters.push(parameter.clone())
}
(_, Some(default_parameter))
if parameter != default_parameter =>
{
non_default_parameters.push(parameter.clone())
}
_ => (),
}
}
&non_default_parameters
}
}
};
f.write_joined(parameters_to_write, ", ")?;
write!(f, ">")?; write!(f, ">")?;
} }
} }
TypeCtor::AssociatedType(type_alias) => { TypeCtor::AssociatedType(type_alias) => {
let trait_ = match type_alias.lookup(f.db).container { let trait_ = match type_alias.lookup(f.db).container {
ContainerId::TraitId(it) => it, AssocContainerId::TraitId(it) => it,
_ => panic!("not an associated type"), _ => panic!("not an associated type"),
}; };
let trait_name = f.db.trait_data(trait_).name.clone(); let trait_name = f.db.trait_data(trait_).name.clone();

View file

@ -11,10 +11,10 @@ use std::sync::Arc;
use hir_def::{ use hir_def::{
builtin_type::BuiltinType, builtin_type::BuiltinType,
generics::WherePredicate, generics::WherePredicate,
path::{GenericArg, Path, PathKind, PathSegment}, path::{GenericArg, Path, PathSegment, PathSegments},
resolver::{HasResolver, Resolver, TypeNs}, resolver::{HasResolver, Resolver, TypeNs},
type_ref::{TypeBound, TypeRef}, type_ref::{TypeBound, TypeRef},
AdtId, AstItemDef, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, AdtId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId,
LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId, LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
}; };
use ra_arena::map::ArenaMap; use ra_arena::map::ArenaMap;
@ -24,11 +24,11 @@ use crate::{
db::HirDatabase, db::HirDatabase,
primitive::{FloatTy, IntTy}, primitive::{FloatTy, IntTy},
utils::{ utils::{
all_super_traits, associated_type_by_name_including_super_traits, make_mut_slice, all_super_traits, associated_type_by_name_including_super_traits, generics, make_mut_slice,
variant_data, variant_data,
}, },
FnSig, GenericPredicate, ImplTy, ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, FnSig, GenericPredicate, ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef,
TraitRef, Ty, TypeCtor, TypeWalk, Ty, TypeCtor, TypeWalk,
}; };
impl Ty { impl Ty {
@ -101,17 +101,19 @@ impl Ty {
TypeRef::Path(path) => path, TypeRef::Path(path) => path,
_ => return None, _ => return None,
}; };
if let PathKind::Type(_) = &path.kind { if path.type_anchor().is_some() {
return None; return None;
} }
if path.segments.len() > 1 { if path.segments().len() > 1 {
return None; return None;
} }
let resolution = match resolver.resolve_path_in_type_ns(db, path) { let resolution = match resolver.resolve_path_in_type_ns(db, path.mod_path()) {
Some((it, None)) => it, Some((it, None)) => it,
_ => return None, _ => return None,
}; };
if let TypeNs::GenericParam(idx) = resolution { if let TypeNs::GenericParam(param_id) = resolution {
let generics = generics(db, resolver.generic_def().expect("generics in scope"));
let idx = generics.param_idx(param_id);
Some(idx) Some(idx)
} else { } else {
None None
@ -122,11 +124,11 @@ impl Ty {
db: &impl HirDatabase, db: &impl HirDatabase,
resolver: &Resolver, resolver: &Resolver,
ty: Ty, ty: Ty,
remaining_segments: &[PathSegment], remaining_segments: PathSegments<'_>,
) -> Ty { ) -> Ty {
if remaining_segments.len() == 1 { if remaining_segments.len() == 1 {
// resolve unselected assoc types // resolve unselected assoc types
let segment = &remaining_segments[0]; let segment = remaining_segments.first().unwrap();
Ty::select_associated_type(db, resolver, ty, segment) Ty::select_associated_type(db, resolver, ty, segment)
} else if remaining_segments.len() > 1 { } else if remaining_segments.len() > 1 {
// FIXME report error (ambiguous associated type) // FIXME report error (ambiguous associated type)
@ -140,15 +142,15 @@ impl Ty {
db: &impl HirDatabase, db: &impl HirDatabase,
resolver: &Resolver, resolver: &Resolver,
resolution: TypeNs, resolution: TypeNs,
resolved_segment: &PathSegment, resolved_segment: PathSegment<'_>,
remaining_segments: &[PathSegment], remaining_segments: PathSegments<'_>,
) -> Ty { ) -> Ty {
let ty = match resolution { let ty = match resolution {
TypeNs::TraitId(trait_) => { TypeNs::TraitId(trait_) => {
let trait_ref = let trait_ref =
TraitRef::from_resolved_path(db, resolver, trait_, resolved_segment, None); TraitRef::from_resolved_path(db, resolver, trait_, resolved_segment, None);
return if remaining_segments.len() == 1 { return if remaining_segments.len() == 1 {
let segment = &remaining_segments[0]; let segment = remaining_segments.first().unwrap();
let associated_ty = associated_type_by_name_including_super_traits( let associated_ty = associated_type_by_name_including_super_traits(
db, db,
trait_ref.trait_, trait_ref.trait_,
@ -174,12 +176,14 @@ impl Ty {
Ty::Dyn(Arc::new([GenericPredicate::Implemented(trait_ref)])) Ty::Dyn(Arc::new([GenericPredicate::Implemented(trait_ref)]))
}; };
} }
TypeNs::GenericParam(idx) => { TypeNs::GenericParam(param_id) => {
let generics = generics(db, resolver.generic_def().expect("generics in scope"));
let idx = generics.param_idx(param_id);
// FIXME: maybe return name in resolution? // FIXME: maybe return name in resolution?
let name = resolved_segment.name.clone(); let name = generics.param_name(param_id);
Ty::Param { idx, name } Ty::Param { idx, name }
} }
TypeNs::SelfType(impl_id) => db.impl_ty(impl_id).self_type().clone(), TypeNs::SelfType(impl_id) => db.impl_self_ty(impl_id).clone(),
TypeNs::AdtSelfType(adt) => db.ty(adt.into()), TypeNs::AdtSelfType(adt) => db.ty(adt.into()),
TypeNs::AdtId(it) => Ty::from_hir_path_inner(db, resolver, resolved_segment, it.into()), TypeNs::AdtId(it) => Ty::from_hir_path_inner(db, resolver, resolved_segment, it.into()),
@ -198,21 +202,21 @@ impl Ty {
pub(crate) fn from_hir_path(db: &impl HirDatabase, resolver: &Resolver, path: &Path) -> Ty { pub(crate) fn from_hir_path(db: &impl HirDatabase, resolver: &Resolver, path: &Path) -> Ty {
// Resolve the path (in type namespace) // Resolve the path (in type namespace)
if let PathKind::Type(type_ref) = &path.kind { if let Some(type_ref) = path.type_anchor() {
let ty = Ty::from_hir(db, resolver, &type_ref); let ty = Ty::from_hir(db, resolver, &type_ref);
let remaining_segments = &path.segments[..]; return Ty::from_type_relative_path(db, resolver, ty, path.segments());
return Ty::from_type_relative_path(db, resolver, ty, remaining_segments);
} }
let (resolution, remaining_index) = match resolver.resolve_path_in_type_ns(db, path) { let (resolution, remaining_index) =
Some(it) => it, match resolver.resolve_path_in_type_ns(db, path.mod_path()) {
None => return Ty::Unknown, Some(it) => it,
}; None => return Ty::Unknown,
};
let (resolved_segment, remaining_segments) = match remaining_index { let (resolved_segment, remaining_segments) = match remaining_index {
None => ( None => (
path.segments.last().expect("resolved path has at least one element"), path.segments().last().expect("resolved path has at least one element"),
&[] as &[PathSegment], PathSegments::EMPTY,
), ),
Some(i) => (&path.segments[i - 1], &path.segments[i..]), Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
}; };
Ty::from_partly_resolved_hir_path( Ty::from_partly_resolved_hir_path(
db, db,
@ -227,7 +231,7 @@ impl Ty {
db: &impl HirDatabase, db: &impl HirDatabase,
resolver: &Resolver, resolver: &Resolver,
self_ty: Ty, self_ty: Ty,
segment: &PathSegment, segment: PathSegment<'_>,
) -> Ty { ) -> Ty {
let param_idx = match self_ty { let param_idx = match self_ty {
Ty::Param { idx, .. } => idx, Ty::Param { idx, .. } => idx,
@ -257,7 +261,7 @@ impl Ty {
fn from_hir_path_inner( fn from_hir_path_inner(
db: &impl HirDatabase, db: &impl HirDatabase,
resolver: &Resolver, resolver: &Resolver,
segment: &PathSegment, segment: PathSegment<'_>,
typable: TyDefId, typable: TyDefId,
) -> Ty { ) -> Ty {
let generic_def = match typable { let generic_def = match typable {
@ -280,7 +284,7 @@ impl Ty {
// special-case enum variants // special-case enum variants
resolved: ValueTyDefId, resolved: ValueTyDefId,
) -> Substs { ) -> Substs {
let last = path.segments.last().expect("path should have at least one segment"); let last = path.segments().last().expect("path should have at least one segment");
let (segment, generic_def) = match resolved { let (segment, generic_def) = match resolved {
ValueTyDefId::FunctionId(it) => (last, Some(it.into())), ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
ValueTyDefId::StructId(it) => (last, Some(it.into())), ValueTyDefId::StructId(it) => (last, Some(it.into())),
@ -292,13 +296,11 @@ impl Ty {
// referring to the variant. So `Option::<T>::None` and // referring to the variant. So `Option::<T>::None` and
// `Option::None::<T>` are both allowed (though the former is // `Option::None::<T>` are both allowed (though the former is
// preferred). See also `def_ids_for_path_segments` in rustc. // preferred). See also `def_ids_for_path_segments` in rustc.
let len = path.segments.len(); let len = path.segments().len();
let segment = if len >= 2 && path.segments[len - 2].args_and_bindings.is_some() { let penultimate = if len >= 2 { path.segments().get(len - 2) } else { None };
// Option::<T>::None let segment = match penultimate {
&path.segments[len - 2] Some(segment) if segment.args_and_bindings.is_some() => segment,
} else { _ => last,
// Option::None::<T>
last
}; };
(segment, Some(var.parent.into())) (segment, Some(var.parent.into()))
} }
@ -310,16 +312,15 @@ impl Ty {
pub(super) fn substs_from_path_segment( pub(super) fn substs_from_path_segment(
db: &impl HirDatabase, db: &impl HirDatabase,
resolver: &Resolver, resolver: &Resolver,
segment: &PathSegment, segment: PathSegment<'_>,
def_generic: Option<GenericDefId>, def_generic: Option<GenericDefId>,
add_self_param: bool, add_self_param: bool,
) -> Substs { ) -> Substs {
let mut substs = Vec::new(); let mut substs = Vec::new();
let def_generics = def_generic.map(|def| db.generic_params(def.into())); let def_generics = def_generic.map(|def| generics(db, def.into()));
let (parent_param_count, param_count) = let (total_len, parent_len, child_len) = def_generics.map_or((0, 0, 0), |g| g.len_split());
def_generics.map_or((0, 0), |g| (g.count_parent_params(), g.params.len())); substs.extend(iter::repeat(Ty::Unknown).take(parent_len));
substs.extend(iter::repeat(Ty::Unknown).take(parent_param_count));
if add_self_param { if add_self_param {
// FIXME this add_self_param argument is kind of a hack: Traits have the // FIXME this add_self_param argument is kind of a hack: Traits have the
// Self type as an implicit first type parameter, but it can't be // Self type as an implicit first type parameter, but it can't be
@ -330,8 +331,8 @@ pub(super) fn substs_from_path_segment(
if let Some(generic_args) = &segment.args_and_bindings { if let Some(generic_args) = &segment.args_and_bindings {
// if args are provided, it should be all of them, but we can't rely on that // if args are provided, it should be all of them, but we can't rely on that
let self_param_correction = if add_self_param { 1 } else { 0 }; let self_param_correction = if add_self_param { 1 } else { 0 };
let param_count = param_count - self_param_correction; let child_len = child_len + self_param_correction;
for arg in generic_args.args.iter().take(param_count) { for arg in generic_args.args.iter().take(child_len) {
match arg { match arg {
GenericArg::Type(type_ref) => { GenericArg::Type(type_ref) => {
let ty = Ty::from_hir(db, resolver, type_ref); let ty = Ty::from_hir(db, resolver, type_ref);
@ -342,10 +343,10 @@ pub(super) fn substs_from_path_segment(
} }
// add placeholders for args that were not provided // add placeholders for args that were not provided
let supplied_params = substs.len(); let supplied_params = substs.len();
for _ in supplied_params..parent_param_count + param_count { for _ in supplied_params..total_len {
substs.push(Ty::Unknown); substs.push(Ty::Unknown);
} }
assert_eq!(substs.len(), parent_param_count + param_count); assert_eq!(substs.len(), total_len);
// handle defaults // handle defaults
if let Some(def_generic) = def_generic { if let Some(def_generic) = def_generic {
@ -369,11 +370,11 @@ impl TraitRef {
path: &Path, path: &Path,
explicit_self_ty: Option<Ty>, explicit_self_ty: Option<Ty>,
) -> Option<Self> { ) -> Option<Self> {
let resolved = match resolver.resolve_path_in_type_ns_fully(db, &path)? { let resolved = match resolver.resolve_path_in_type_ns_fully(db, path.mod_path())? {
TypeNs::TraitId(tr) => tr, TypeNs::TraitId(tr) => tr,
_ => return None, _ => return None,
}; };
let segment = path.segments.last().expect("path should have at least one segment"); let segment = path.segments().last().expect("path should have at least one segment");
Some(TraitRef::from_resolved_path(db, resolver, resolved.into(), segment, explicit_self_ty)) Some(TraitRef::from_resolved_path(db, resolver, resolved.into(), segment, explicit_self_ty))
} }
@ -381,7 +382,7 @@ impl TraitRef {
db: &impl HirDatabase, db: &impl HirDatabase,
resolver: &Resolver, resolver: &Resolver,
resolved: TraitId, resolved: TraitId,
segment: &PathSegment, segment: PathSegment<'_>,
explicit_self_ty: Option<Ty>, explicit_self_ty: Option<Ty>,
) -> Self { ) -> Self {
let mut substs = TraitRef::substs_from_path(db, resolver, segment, resolved); let mut substs = TraitRef::substs_from_path(db, resolver, segment, resolved);
@ -407,7 +408,7 @@ impl TraitRef {
fn substs_from_path( fn substs_from_path(
db: &impl HirDatabase, db: &impl HirDatabase,
resolver: &Resolver, resolver: &Resolver,
segment: &PathSegment, segment: PathSegment<'_>,
resolved: TraitId, resolved: TraitId,
) -> Substs { ) -> Substs {
let has_self_param = let has_self_param =
@ -461,12 +462,12 @@ fn assoc_type_bindings_from_type_bound<'a>(
trait_ref: TraitRef, trait_ref: TraitRef,
) -> impl Iterator<Item = GenericPredicate> + 'a { ) -> impl Iterator<Item = GenericPredicate> + 'a {
let last_segment = match bound { let last_segment = match bound {
TypeBound::Path(path) => path.segments.last(), TypeBound::Path(path) => path.segments().last(),
TypeBound::Error => None, TypeBound::Error => None,
}; };
last_segment last_segment
.into_iter() .into_iter()
.flat_map(|segment| segment.args_and_bindings.iter()) .flat_map(|segment| segment.args_and_bindings.into_iter())
.flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) .flat_map(|args_and_bindings| args_and_bindings.bindings.iter())
.map(move |(name, type_ref)| { .map(move |(name, type_ref)| {
let associated_ty = let associated_ty =
@ -532,6 +533,15 @@ pub(crate) fn generic_predicates_for_param_query(
.collect() .collect()
} }
pub(crate) fn generic_predicates_for_param_recover(
_db: &impl HirDatabase,
_cycle: &[String],
_def: &GenericDefId,
_param_idx: &u32,
) -> Arc<[GenericPredicate]> {
Arc::new([])
}
impl TraitEnvironment { impl TraitEnvironment {
pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> { pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
let predicates = resolver let predicates = resolver
@ -558,12 +568,11 @@ pub(crate) fn generic_predicates_query(
/// Resolve the default type params from generics /// Resolve the default type params from generics
pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs { pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs {
let resolver = def.resolver(db); let resolver = def.resolver(db);
let generic_params = db.generic_params(def.into()); let generic_params = generics(db, def.into());
let defaults = generic_params let defaults = generic_params
.params_including_parent() .iter()
.into_iter() .map(|(_idx, p)| p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(db, &resolver, t)))
.map(|p| p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(db, &resolver, t)))
.collect(); .collect();
Substs(defaults) Substs(defaults)
@ -580,7 +589,7 @@ fn fn_sig_for_fn(db: &impl HirDatabase, def: FunctionId) -> FnSig {
/// Build the declared type of a function. This should not need to look at the /// Build the declared type of a function. This should not need to look at the
/// function body. /// function body.
fn type_for_fn(db: &impl HirDatabase, def: FunctionId) -> Ty { fn type_for_fn(db: &impl HirDatabase, def: FunctionId) -> Ty {
let generics = db.generic_params(def.into()); let generics = generics(db, def.into());
let substs = Substs::identity(&generics); let substs = Substs::identity(&generics);
Ty::apply(TypeCtor::FnDef(def.into()), substs) Ty::apply(TypeCtor::FnDef(def.into()), substs)
} }
@ -630,7 +639,7 @@ fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Ty {
if struct_data.variant_data.is_unit() { if struct_data.variant_data.is_unit() {
return type_for_adt(db, def.into()); // Unit struct return type_for_adt(db, def.into()); // Unit struct
} }
let generics = db.generic_params(def.into()); let generics = generics(db, def.into());
let substs = Substs::identity(&generics); let substs = Substs::identity(&generics);
Ty::apply(TypeCtor::FnDef(def.into()), substs) Ty::apply(TypeCtor::FnDef(def.into()), substs)
} }
@ -644,7 +653,7 @@ fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId
.iter() .iter()
.map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref)) .map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let generics = db.generic_params(def.parent.into()); let generics = generics(db, def.parent.into());
let substs = Substs::identity(&generics); let substs = Substs::identity(&generics);
let ret = type_for_adt(db, def.parent.into()).subst(&substs); let ret = type_for_adt(db, def.parent.into()).subst(&substs);
FnSig::from_params_and_return(params, ret) FnSig::from_params_and_return(params, ret)
@ -657,18 +666,18 @@ fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId)
if var_data.is_unit() { if var_data.is_unit() {
return type_for_adt(db, def.parent.into()); // Unit variant return type_for_adt(db, def.parent.into()); // Unit variant
} }
let generics = db.generic_params(def.parent.into()); let generics = generics(db, def.parent.into());
let substs = Substs::identity(&generics); let substs = Substs::identity(&generics);
Ty::apply(TypeCtor::FnDef(EnumVariantId::from(def).into()), substs) Ty::apply(TypeCtor::FnDef(EnumVariantId::from(def).into()), substs)
} }
fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Ty { fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Ty {
let generics = db.generic_params(adt.into()); let generics = generics(db, adt.into());
Ty::apply(TypeCtor::Adt(adt), Substs::identity(&generics)) Ty::apply(TypeCtor::Adt(adt), Substs::identity(&generics))
} }
fn type_for_type_alias(db: &impl HirDatabase, t: TypeAliasId) -> Ty { fn type_for_type_alias(db: &impl HirDatabase, t: TypeAliasId) -> Ty {
let generics = db.generic_params(t.into()); let generics = generics(db, t.into());
let resolver = t.resolver(db); let resolver = t.resolver(db);
let type_ref = &db.type_alias_data(t).type_ref; let type_ref = &db.type_alias_data(t).type_ref;
let substs = Substs::identity(&generics); let substs = Substs::identity(&generics);
@ -687,10 +696,11 @@ impl_froms!(CallableDef: FunctionId, StructId, EnumVariantId);
impl CallableDef { impl CallableDef {
pub fn krate(self, db: &impl HirDatabase) -> CrateId { pub fn krate(self, db: &impl HirDatabase) -> CrateId {
match self { match self {
CallableDef::FunctionId(f) => f.lookup(db).module(db).krate, CallableDef::FunctionId(f) => f.lookup(db).module(db),
CallableDef::StructId(s) => s.module(db).krate, CallableDef::StructId(s) => s.lookup(db).container.module(db),
CallableDef::EnumVariantId(e) => e.parent.module(db).krate, CallableDef::EnumVariantId(e) => e.parent.lookup(db).container.module(db),
} }
.krate
} }
} }
@ -733,6 +743,11 @@ pub(crate) fn ty_query(db: &impl HirDatabase, def: TyDefId) -> Ty {
TyDefId::TypeAliasId(it) => type_for_type_alias(db, it), TyDefId::TypeAliasId(it) => type_for_type_alias(db, it),
} }
} }
pub(crate) fn ty_recover(_db: &impl HirDatabase, _cycle: &[String], _def: &TyDefId) -> Ty {
Ty::Unknown
}
pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Ty { pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Ty {
match def { match def {
ValueTyDefId::FunctionId(it) => type_for_fn(db, it), ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
@ -743,17 +758,24 @@ pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Ty {
} }
} }
pub(crate) fn impl_ty_query(db: &impl HirDatabase, impl_id: ImplId) -> ImplTy { pub(crate) fn impl_self_ty_query(db: &impl HirDatabase, impl_id: ImplId) -> Ty {
let impl_data = db.impl_data(impl_id); let impl_data = db.impl_data(impl_id);
let resolver = impl_id.resolver(db); let resolver = impl_id.resolver(db);
let self_ty = Ty::from_hir(db, &resolver, &impl_data.target_type); Ty::from_hir(db, &resolver, &impl_data.target_type)
match impl_data.target_trait.as_ref() { }
Some(trait_ref) => {
match TraitRef::from_hir(db, &resolver, trait_ref, Some(self_ty.clone())) { pub(crate) fn impl_self_ty_recover(
Some(it) => ImplTy::TraitRef(it), _db: &impl HirDatabase,
None => ImplTy::Inherent(self_ty), _cycle: &[String],
} _impl_id: &ImplId,
} ) -> Ty {
None => ImplTy::Inherent(self_ty), Ty::Unknown
} }
pub(crate) fn impl_trait_query(db: &impl HirDatabase, impl_id: ImplId) -> Option<TraitRef> {
let impl_data = db.impl_data(impl_id);
let resolver = impl_id.resolver(db);
let self_ty = db.impl_self_ty(impl_id);
let target_trait = impl_data.target_trait.as_ref()?;
TraitRef::from_hir(db, &resolver, target_trait, Some(self_ty.clone()))
} }

View file

@ -6,4 +6,5 @@ test_utils::marks!(
type_var_resolves_to_int_var type_var_resolves_to_int_var
match_ergonomics_ref match_ergonomics_ref
coerce_merge_fail_fallback coerce_merge_fail_fallback
insert_vars_for_impl_trait
); );

View file

@ -6,20 +6,21 @@ use std::sync::Arc;
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use hir_def::{ use hir_def::{
lang_item::LangItemTarget, resolver::Resolver, type_ref::Mutability, AssocItemId, AstItemDef, lang_item::LangItemTarget, resolver::Resolver, type_ref::Mutability, AssocContainerId,
FunctionId, HasModule, ImplId, TraitId, AssocItemId, FunctionId, HasModule, ImplId, Lookup, TraitId,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
use ra_db::CrateId; use ra_db::CrateId;
use ra_prof::profile; use ra_prof::profile;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use super::Substs;
use crate::{ use crate::{
autoderef, autoderef,
db::HirDatabase, db::HirDatabase,
primitive::{FloatBitness, Uncertain}, primitive::{FloatBitness, Uncertain},
utils::all_super_traits, utils::all_super_traits,
Canonical, ImplTy, InEnvironment, TraitEnvironment, TraitRef, Ty, TypeCtor, Canonical, InEnvironment, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk,
}; };
/// This is used as a key for indexing impls. /// This is used as a key for indexing impls.
@ -57,12 +58,13 @@ impl CrateImplBlocks {
let crate_def_map = db.crate_def_map(krate); let crate_def_map = db.crate_def_map(krate);
for (_module_id, module_data) in crate_def_map.modules.iter() { for (_module_id, module_data) in crate_def_map.modules.iter() {
for &impl_id in module_data.impls.iter() { for impl_id in module_data.scope.impls() {
match db.impl_ty(impl_id) { match db.impl_trait(impl_id) {
ImplTy::TraitRef(tr) => { Some(tr) => {
res.impls_by_trait.entry(tr.trait_).or_default().push(impl_id); res.impls_by_trait.entry(tr.trait_).or_default().push(impl_id);
} }
ImplTy::Inherent(self_ty) => { None => {
let self_ty = db.impl_self_ty(impl_id);
if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty) { if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty) {
res.impls.entry(self_ty_fp).or_default().push(impl_id); res.impls.entry(self_ty_fp).or_default().push(impl_id);
} }
@ -132,7 +134,7 @@ impl Ty {
LangItemTarget::ImplBlockId(it) => Some(it), LangItemTarget::ImplBlockId(it) => Some(it),
_ => None, _ => None,
}) })
.map(|it| it.module(db).krate) .map(|it| it.lookup(db).container.module(db).krate)
.collect(); .collect();
Some(res) Some(res)
} }
@ -175,7 +177,6 @@ pub fn iterate_method_candidates<T>(
mode: LookupMode, mode: LookupMode,
mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>, mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
) -> Option<T> { ) -> Option<T> {
let krate = resolver.krate()?;
match mode { match mode {
LookupMode::MethodCall => { LookupMode::MethodCall => {
// For method calls, rust first does any number of autoderef, and then one // For method calls, rust first does any number of autoderef, and then one
@ -188,57 +189,159 @@ pub fn iterate_method_candidates<T>(
// rustc does an autoderef and then autoref again). // rustc does an autoderef and then autoref again).
let environment = TraitEnvironment::lower(db, resolver); let environment = TraitEnvironment::lower(db, resolver);
let ty = InEnvironment { value: ty.clone(), environment }; let ty = InEnvironment { value: ty.clone(), environment };
for derefed_ty in autoderef::autoderef(db, resolver.krate(), ty) { let krate = resolver.krate()?;
if let Some(result) =
iterate_inherent_methods(&derefed_ty, db, name, mode, krate, &mut callback) // We have to be careful about the order we're looking at candidates
{ // in here. Consider the case where we're resolving `x.clone()`
return Some(result); // where `x: &Vec<_>`. This resolves to the clone method with self
} // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
if let Some(result) = iterate_trait_method_candidates( // the receiver type exactly matches before cases where we have to
&derefed_ty, // do autoref. But in the autoderef steps, the `&_` self type comes
// up *before* the `Vec<_>` self type.
//
// On the other hand, we don't want to just pick any by-value method
// before any by-autoref method; it's just that we need to consider
// the methods by autoderef order of *receiver types*, not *self
// types*.
let deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty.clone()).collect();
for i in 0..deref_chain.len() {
if let Some(result) = iterate_method_candidates_with_autoref(
&deref_chain[i..],
db, db,
resolver, resolver,
name, name,
mode,
&mut callback, &mut callback,
) { ) {
return Some(result); return Some(result);
} }
} }
None
} }
LookupMode::Path => { LookupMode::Path => {
// No autoderef for path lookups // No autoderef for path lookups
if let Some(result) = iterate_method_candidates_for_self_ty(&ty, db, resolver, name, &mut callback)
iterate_inherent_methods(&ty, db, name, mode, krate.into(), &mut callback) }
{ }
return Some(result); }
}
if let Some(result) = fn iterate_method_candidates_with_autoref<T>(
iterate_trait_method_candidates(&ty, db, resolver, name, mode, &mut callback) deref_chain: &[Canonical<Ty>],
{ db: &impl HirDatabase,
return Some(result); resolver: &Resolver,
} name: Option<&Name>,
mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
) -> Option<T> {
if let Some(result) = iterate_method_candidates_by_receiver(
&deref_chain[0],
&deref_chain[1..],
db,
resolver,
name,
&mut callback,
) {
return Some(result);
}
let refed = Canonical {
num_vars: deref_chain[0].num_vars,
value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()),
};
if let Some(result) = iterate_method_candidates_by_receiver(
&refed,
deref_chain,
db,
resolver,
name,
&mut callback,
) {
return Some(result);
}
let ref_muted = Canonical {
num_vars: deref_chain[0].num_vars,
value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()),
};
if let Some(result) = iterate_method_candidates_by_receiver(
&ref_muted,
deref_chain,
db,
resolver,
name,
&mut callback,
) {
return Some(result);
}
None
}
fn iterate_method_candidates_by_receiver<T>(
receiver_ty: &Canonical<Ty>,
rest_of_deref_chain: &[Canonical<Ty>],
db: &impl HirDatabase,
resolver: &Resolver,
name: Option<&Name>,
mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
) -> Option<T> {
// We're looking for methods with *receiver* type receiver_ty. These could
// be found in any of the derefs of receiver_ty, so we have to go through
// that.
let krate = resolver.krate()?;
for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
if let Some(result) =
iterate_inherent_methods(self_ty, db, name, Some(receiver_ty), krate, &mut callback)
{
return Some(result);
}
}
for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
if let Some(result) = iterate_trait_method_candidates(
self_ty,
db,
resolver,
name,
Some(receiver_ty),
&mut callback,
) {
return Some(result);
} }
} }
None None
} }
fn iterate_trait_method_candidates<T>( fn iterate_method_candidates_for_self_ty<T>(
ty: &Canonical<Ty>, self_ty: &Canonical<Ty>,
db: &impl HirDatabase, db: &impl HirDatabase,
resolver: &Resolver, resolver: &Resolver,
name: Option<&Name>, name: Option<&Name>,
mode: LookupMode, mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
) -> Option<T> {
let krate = resolver.krate()?;
if let Some(result) = iterate_inherent_methods(self_ty, db, name, None, krate, &mut callback) {
return Some(result);
}
if let Some(result) =
iterate_trait_method_candidates(self_ty, db, resolver, name, None, &mut callback)
{
return Some(result);
}
None
}
fn iterate_trait_method_candidates<T>(
self_ty: &Canonical<Ty>,
db: &impl HirDatabase,
resolver: &Resolver,
name: Option<&Name>,
receiver_ty: Option<&Canonical<Ty>>,
mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>, mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
) -> Option<T> { ) -> Option<T> {
let krate = resolver.krate()?; let krate = resolver.krate()?;
// FIXME: maybe put the trait_env behind a query (need to figure out good input parameters for that) // FIXME: maybe put the trait_env behind a query (need to figure out good input parameters for that)
let env = TraitEnvironment::lower(db, resolver); let env = TraitEnvironment::lower(db, resolver);
// if ty is `impl Trait` or `dyn Trait`, the trait doesn't need to be in scope // if ty is `impl Trait` or `dyn Trait`, the trait doesn't need to be in scope
let inherent_trait = ty.value.inherent_trait().into_iter(); let inherent_trait = self_ty.value.inherent_trait().into_iter();
// if we have `T: Trait` in the param env, the trait doesn't need to be in scope // if we have `T: Trait` in the param env, the trait doesn't need to be in scope
let traits_from_env = env let traits_from_env = env
.trait_predicates_for_self_ty(&ty.value) .trait_predicates_for_self_ty(&self_ty.value)
.map(|tr| tr.trait_) .map(|tr| tr.trait_)
.flat_map(|t| all_super_traits(db, t)); .flat_map(|t| all_super_traits(db, t));
let traits = let traits =
@ -251,17 +354,17 @@ fn iterate_trait_method_candidates<T>(
// iteration // iteration
let mut known_implemented = false; let mut known_implemented = false;
for (_name, item) in data.items.iter() { for (_name, item) in data.items.iter() {
if !is_valid_candidate(db, name, mode, (*item).into()) { if !is_valid_candidate(db, name, receiver_ty, (*item).into(), self_ty) {
continue; continue;
} }
if !known_implemented { if !known_implemented {
let goal = generic_implements_goal(db, env.clone(), t, ty.clone()); let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone());
if db.trait_solve(krate.into(), goal).is_none() { if db.trait_solve(krate.into(), goal).is_none() {
continue 'traits; continue 'traits;
} }
} }
known_implemented = true; known_implemented = true;
if let Some(result) = callback(&ty.value, (*item).into()) { if let Some(result) = callback(&self_ty.value, (*item).into()) {
return Some(result); return Some(result);
} }
} }
@ -270,22 +373,22 @@ fn iterate_trait_method_candidates<T>(
} }
fn iterate_inherent_methods<T>( fn iterate_inherent_methods<T>(
ty: &Canonical<Ty>, self_ty: &Canonical<Ty>,
db: &impl HirDatabase, db: &impl HirDatabase,
name: Option<&Name>, name: Option<&Name>,
mode: LookupMode, receiver_ty: Option<&Canonical<Ty>>,
krate: CrateId, krate: CrateId,
mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>, mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
) -> Option<T> { ) -> Option<T> {
for krate in ty.value.def_crates(db, krate)? { for krate in self_ty.value.def_crates(db, krate)? {
let impls = db.impls_in_crate(krate); let impls = db.impls_in_crate(krate);
for impl_block in impls.lookup_impl_blocks(&ty.value) { for impl_block in impls.lookup_impl_blocks(&self_ty.value) {
for &item in db.impl_data(impl_block).items.iter() { for &item in db.impl_data(impl_block).items.iter() {
if !is_valid_candidate(db, name, mode, item) { if !is_valid_candidate(db, name, receiver_ty, item, self_ty) {
continue; continue;
} }
if let Some(result) = callback(&ty.value, item.into()) { if let Some(result) = callback(&self_ty.value, item) {
return Some(result); return Some(result);
} }
} }
@ -297,23 +400,68 @@ fn iterate_inherent_methods<T>(
fn is_valid_candidate( fn is_valid_candidate(
db: &impl HirDatabase, db: &impl HirDatabase,
name: Option<&Name>, name: Option<&Name>,
mode: LookupMode, receiver_ty: Option<&Canonical<Ty>>,
item: AssocItemId, item: AssocItemId,
self_ty: &Canonical<Ty>,
) -> bool { ) -> bool {
match item { match item {
AssocItemId::FunctionId(m) => { AssocItemId::FunctionId(m) => {
let data = db.function_data(m); let data = db.function_data(m);
name.map_or(true, |name| &data.name == name) if let Some(name) = name {
&& (data.has_self_param || mode == LookupMode::Path) if &data.name != name {
return false;
}
}
if let Some(receiver_ty) = receiver_ty {
if !data.has_self_param {
return false;
}
let transformed_receiver_ty = match transform_receiver_ty(db, m, self_ty) {
Some(ty) => ty,
None => return false,
};
if transformed_receiver_ty != receiver_ty.value {
return false;
}
}
true
} }
AssocItemId::ConstId(c) => { AssocItemId::ConstId(c) => {
let data = db.const_data(c); let data = db.const_data(c);
name.map_or(true, |name| data.name.as_ref() == Some(name)) && (mode == LookupMode::Path) name.map_or(true, |name| data.name.as_ref() == Some(name)) && receiver_ty.is_none()
} }
_ => false, _ => false,
} }
} }
pub(crate) fn inherent_impl_substs(
db: &impl HirDatabase,
impl_id: ImplId,
self_ty: &Canonical<Ty>,
) -> Option<Substs> {
let vars = Substs::build_for_def(db, impl_id).fill_with_bound_vars(0).build();
let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars);
let self_ty_with_vars = Canonical { num_vars: vars.len(), value: self_ty_with_vars };
super::infer::unify(&self_ty_with_vars, self_ty)
}
fn transform_receiver_ty(
db: &impl HirDatabase,
function_id: FunctionId,
self_ty: &Canonical<Ty>,
) -> Option<Ty> {
let substs = match function_id.lookup(db).container {
AssocContainerId::TraitId(_) => Substs::build_for_def(db, function_id)
.push(self_ty.value.clone())
.fill_with_unknown()
.build(),
AssocContainerId::ImplId(impl_id) => inherent_impl_substs(db, impl_id, &self_ty)?,
AssocContainerId::ContainerId(_) => unreachable!(),
};
let sig = db.callable_item_signature(function_id.into());
Some(sig.params()[0].clone().subst(&substs))
}
pub fn implements_trait( pub fn implements_trait(
ty: &Canonical<Ty>, ty: &Canonical<Ty>,
db: &impl HirDatabase, db: &impl HirDatabase,

View file

@ -74,7 +74,7 @@ impl TestDB {
for &krate in self.relevant_crates(file_id).iter() { for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate); let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules.iter() { for (local_id, data) in crate_def_map.modules.iter() {
if data.definition == Some(file_id) { if data.origin.file_id() == Some(file_id) {
return ModuleId { krate, local_id }; return ModuleId { krate, local_id };
} }
} }
@ -98,7 +98,7 @@ impl TestDB {
} }
} }
for &impl_id in crate_def_map[module_id].impls.iter() { for impl_id in crate_def_map[module_id].scope.impls() {
let impl_data = self.impl_data(impl_id); let impl_data = self.impl_data(impl_id);
for item in impl_data.items.iter() { for item in impl_data.items.iter() {
if let AssocItemId::FunctionId(f) = item { if let AssocItemId::FunctionId(f) = item {

File diff suppressed because it is too large Load diff

View file

@ -1,3 +1,4 @@
use super::infer_with_mismatches;
use insta::assert_snapshot; use insta::assert_snapshot;
use test_utils::covers; use test_utils::covers;
@ -367,3 +368,161 @@ fn test() {
"### "###
); );
} }
#[test]
fn return_coerce_unknown() {
assert_snapshot!(
infer_with_mismatches(r#"
fn foo() -> u32 {
return unknown;
}
"#, true),
@r###"
[17; 40) '{ ...own; }': !
[23; 37) 'return unknown': !
[30; 37) 'unknown': u32
"###
);
}
#[test]
fn coerce_autoderef() {
assert_snapshot!(
infer_with_mismatches(r#"
struct Foo;
fn takes_ref_foo(x: &Foo) {}
fn test() {
takes_ref_foo(&Foo);
takes_ref_foo(&&Foo);
takes_ref_foo(&&&Foo);
}
"#, true),
@r###"
[30; 31) 'x': &Foo
[39; 41) '{}': ()
[52; 133) '{ ...oo); }': ()
[58; 71) 'takes_ref_foo': fn takes_ref_foo(&Foo) -> ()
[58; 77) 'takes_...(&Foo)': ()
[72; 76) '&Foo': &Foo
[73; 76) 'Foo': Foo
[83; 96) 'takes_ref_foo': fn takes_ref_foo(&Foo) -> ()
[83; 103) 'takes_...&&Foo)': ()
[97; 102) '&&Foo': &&Foo
[98; 102) '&Foo': &Foo
[99; 102) 'Foo': Foo
[109; 122) 'takes_ref_foo': fn takes_ref_foo(&Foo) -> ()
[109; 130) 'takes_...&&Foo)': ()
[123; 129) '&&&Foo': &&&Foo
[124; 129) '&&Foo': &&Foo
[125; 129) '&Foo': &Foo
[126; 129) 'Foo': Foo
"###
);
}
#[test]
fn coerce_autoderef_generic() {
assert_snapshot!(
infer_with_mismatches(r#"
struct Foo;
fn takes_ref<T>(x: &T) -> T { *x }
fn test() {
takes_ref(&Foo);
takes_ref(&&Foo);
takes_ref(&&&Foo);
}
"#, true),
@r###"
[29; 30) 'x': &T
[41; 47) '{ *x }': T
[43; 45) '*x': T
[44; 45) 'x': &T
[58; 127) '{ ...oo); }': ()
[64; 73) 'takes_ref': fn takes_ref<Foo>(&T) -> T
[64; 79) 'takes_ref(&Foo)': Foo
[74; 78) '&Foo': &Foo
[75; 78) 'Foo': Foo
[85; 94) 'takes_ref': fn takes_ref<&Foo>(&T) -> T
[85; 101) 'takes_...&&Foo)': &Foo
[95; 100) '&&Foo': &&Foo
[96; 100) '&Foo': &Foo
[97; 100) 'Foo': Foo
[107; 116) 'takes_ref': fn takes_ref<&&Foo>(&T) -> T
[107; 124) 'takes_...&&Foo)': &&Foo
[117; 123) '&&&Foo': &&&Foo
[118; 123) '&&Foo': &&Foo
[119; 123) '&Foo': &Foo
[120; 123) 'Foo': Foo
"###
);
}
#[test]
fn closure_return_coerce() {
assert_snapshot!(
infer_with_mismatches(r#"
fn foo() {
let x = || {
if true {
return &1u32;
}
&&1u32
};
}
"#, true),
@r###"
[10; 106) '{ ... }; }': ()
[20; 21) 'x': || -> &u32
[24; 103) '|| { ... }': || -> &u32
[27; 103) '{ ... }': &u32
[37; 82) 'if tru... }': ()
[40; 44) 'true': bool
[45; 82) '{ ... }': !
[59; 71) 'return &1u32': !
[66; 71) '&1u32': &u32
[67; 71) '1u32': u32
[91; 97) '&&1u32': &&u32
[92; 97) '&1u32': &u32
[93; 97) '1u32': u32
"###
);
}
#[test]
fn coerce_fn_item_to_fn_ptr() {
assert_snapshot!(
infer_with_mismatches(r#"
fn foo(x: u32) -> isize { 1 }
fn test() {
let f: fn(u32) -> isize = foo;
}
"#, true),
@r###"
[8; 9) 'x': u32
[25; 30) '{ 1 }': isize
[27; 28) '1': isize
[41; 79) '{ ...foo; }': ()
[51; 52) 'f': fn(u32) -> isize
[73; 76) 'foo': fn foo(u32) -> isize
"###
);
}
#[test]
fn coerce_closure_to_fn_ptr() {
assert_snapshot!(
infer_with_mismatches(r#"
fn test() {
let f: fn(u32) -> isize = |x| { 1 };
}
"#, true),
@r###"
[11; 55) '{ ...1 }; }': ()
[21; 22) 'f': fn(u32) -> isize
[43; 52) '|x| { 1 }': |u32| -> isize
[44; 45) 'x': u32
[47; 52) '{ 1 }': isize
[49; 50) '1': isize
"###
);
}

View file

@ -0,0 +1,390 @@
use super::{infer, type_at, type_at_pos};
use crate::test_db::TestDB;
use insta::assert_snapshot;
use ra_db::fixture::WithFixture;
#[test]
fn cfg_impl_block() {
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs crate:main deps:foo cfg:test
use foo::S as T;
struct S;
#[cfg(test)]
impl S {
fn foo1(&self) -> i32 { 0 }
}
#[cfg(not(test))]
impl S {
fn foo2(&self) -> i32 { 0 }
}
fn test() {
let t = (S.foo1(), S.foo2(), T.foo3(), T.foo4());
t<|>;
}
//- /foo.rs crate:foo
struct S;
#[cfg(not(test))]
impl S {
fn foo3(&self) -> i32 { 0 }
}
#[cfg(test)]
impl S {
fn foo4(&self) -> i32 { 0 }
}
"#,
);
assert_eq!("(i32, {unknown}, i32, {unknown})", type_at_pos(&db, pos));
}
#[test]
fn infer_macros_expanded() {
assert_snapshot!(
infer(r#"
struct Foo(Vec<i32>);
macro_rules! foo {
($($item:expr),*) => {
{
Foo(vec![$($item,)*])
}
};
}
fn main() {
let x = foo!(1,2);
}
"#),
@r###"
![0; 17) '{Foo(v...,2,])}': Foo
![1; 4) 'Foo': Foo({unknown}) -> Foo
![1; 16) 'Foo(vec![1,2,])': Foo
![5; 15) 'vec![1,2,]': {unknown}
[156; 182) '{ ...,2); }': ()
[166; 167) 'x': Foo
"###
);
}
#[test]
fn infer_legacy_textual_scoped_macros_expanded() {
assert_snapshot!(
infer(r#"
struct Foo(Vec<i32>);
#[macro_use]
mod m {
macro_rules! foo {
($($item:expr),*) => {
{
Foo(vec![$($item,)*])
}
};
}
}
fn main() {
let x = foo!(1,2);
let y = crate::foo!(1,2);
}
"#),
@r###"
![0; 17) '{Foo(v...,2,])}': Foo
![1; 4) 'Foo': Foo({unknown}) -> Foo
![1; 16) 'Foo(vec![1,2,])': Foo
![5; 15) 'vec![1,2,]': {unknown}
[195; 251) '{ ...,2); }': ()
[205; 206) 'x': Foo
[228; 229) 'y': {unknown}
[232; 248) 'crate:...!(1,2)': {unknown}
"###
);
}
#[test]
fn infer_path_qualified_macros_expanded() {
assert_snapshot!(
infer(r#"
#[macro_export]
macro_rules! foo {
() => { 42i32 }
}
mod m {
pub use super::foo as bar;
}
fn main() {
let x = crate::foo!();
let y = m::bar!();
}
"#),
@r###"
![0; 5) '42i32': i32
![0; 5) '42i32': i32
[111; 164) '{ ...!(); }': ()
[121; 122) 'x': i32
[148; 149) 'y': i32
"###
);
}
#[test]
fn infer_type_value_macro_having_same_name() {
assert_snapshot!(
infer(r#"
#[macro_export]
macro_rules! foo {
() => {
mod foo {
pub use super::foo;
}
};
($x:tt) => {
$x
};
}
foo!();
fn foo() {
let foo = foo::foo!(42i32);
}
"#),
@r###"
![0; 5) '42i32': i32
[171; 206) '{ ...32); }': ()
[181; 184) 'foo': i32
"###
);
}
#[test]
fn processes_impls_generated_by_macros() {
let t = type_at(
r#"
//- /main.rs
macro_rules! m {
($ident:ident) => (impl Trait for $ident {})
}
trait Trait { fn foo(self) -> u128 {} }
struct S;
m!(S);
fn test() { S.foo()<|>; }
"#,
);
assert_eq!(t, "u128");
}
#[test]
fn infer_impl_items_generated_by_macros() {
let t = type_at(
r#"
//- /main.rs
macro_rules! m {
() => (fn foo(&self) -> u128 {0})
}
struct S;
impl S {
m!();
}
fn test() { S.foo()<|>; }
"#,
);
assert_eq!(t, "u128");
}
#[test]
fn infer_impl_items_generated_by_macros_chain() {
let t = type_at(
r#"
//- /main.rs
macro_rules! m_inner {
() => {fn foo(&self) -> u128 {0}}
}
macro_rules! m {
() => {m_inner!();}
}
struct S;
impl S {
m!();
}
fn test() { S.foo()<|>; }
"#,
);
assert_eq!(t, "u128");
}
#[test]
fn infer_macro_with_dollar_crate_is_correct_in_expr() {
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs crate:main deps:foo
fn test() {
let x = (foo::foo!(1), foo::foo!(2));
x<|>;
}
//- /lib.rs crate:foo
#[macro_export]
macro_rules! foo {
(1) => { $crate::bar!() };
(2) => { 1 + $crate::baz() };
}
#[macro_export]
macro_rules! bar {
() => { 42 }
}
pub fn baz() -> usize { 31usize }
"#,
);
assert_eq!("(i32, usize)", type_at_pos(&db, pos));
}
#[test]
fn infer_type_value_non_legacy_macro_use_as() {
assert_snapshot!(
infer(r#"
mod m {
macro_rules! _foo {
($x:ident) => { type $x = u64; }
}
pub(crate) use _foo as foo;
}
m::foo!(foo);
use foo as bar;
fn f() -> bar { 0 }
fn main() {
let _a = f();
}
"#),
@r###"
[159; 164) '{ 0 }': u64
[161; 162) '0': u64
[175; 199) '{ ...f(); }': ()
[187; 189) '_a': u64
[193; 194) 'f': fn f() -> u64
[193; 196) 'f()': u64
"###
);
}
#[test]
fn infer_builtin_macros_line() {
assert_snapshot!(
infer(r#"
#[rustc_builtin_macro]
macro_rules! line {() => {}}
fn main() {
let x = line!();
}
"#),
@r###"
![0; 1) '6': i32
[64; 88) '{ ...!(); }': ()
[74; 75) 'x': i32
"###
);
}
#[test]
fn infer_builtin_macros_file() {
assert_snapshot!(
infer(r#"
#[rustc_builtin_macro]
macro_rules! file {() => {}}
fn main() {
let x = file!();
}
"#),
@r###"
![0; 2) '""': &str
[64; 88) '{ ...!(); }': ()
[74; 75) 'x': &str
"###
);
}
#[test]
fn infer_builtin_macros_column() {
assert_snapshot!(
infer(r#"
#[rustc_builtin_macro]
macro_rules! column {() => {}}
fn main() {
let x = column!();
}
"#),
@r###"
![0; 2) '13': i32
[66; 92) '{ ...!(); }': ()
[76; 77) 'x': i32
"###
);
}
#[test]
fn infer_derive_clone_simple() {
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs crate:main deps:std
#[derive(Clone)]
struct S;
fn test() {
S.clone()<|>;
}
//- /lib.rs crate:std
#[prelude_import]
use clone::*;
mod clone {
trait Clone {
fn clone(&self) -> Self;
}
}
"#,
);
assert_eq!("S", type_at_pos(&db, pos));
}
#[test]
fn infer_derive_clone_with_params() {
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs crate:main deps:std
#[derive(Clone)]
struct S;
#[derive(Clone)]
struct Wrapper<T>(T);
struct NonClone;
fn test() {
(Wrapper(S).clone(), Wrapper(NonClone).clone())<|>;
}
//- /lib.rs crate:std
#[prelude_import]
use clone::*;
mod clone {
trait Clone {
fn clone(&self) -> Self;
}
}
"#,
);
assert_eq!("(Wrapper<S>, {unknown})", type_at_pos(&db, pos));
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,238 @@
use super::infer;
use insta::assert_snapshot;
use test_utils::covers;
#[test]
fn infer_pattern() {
assert_snapshot!(
infer(r#"
fn test(x: &i32) {
let y = x;
let &z = x;
let a = z;
let (c, d) = (1, "hello");
for (e, f) in some_iter {
let g = e;
}
if let [val] = opt {
let h = val;
}
let lambda = |a: u64, b, c: i32| { a + b; c };
let ref ref_to_x = x;
let mut mut_x = x;
let ref mut mut_ref_to_x = x;
let k = mut_ref_to_x;
}
"#),
@r###"
[9; 10) 'x': &i32
[18; 369) '{ ...o_x; }': ()
[28; 29) 'y': &i32
[32; 33) 'x': &i32
[43; 45) '&z': &i32
[44; 45) 'z': i32
[48; 49) 'x': &i32
[59; 60) 'a': i32
[63; 64) 'z': i32
[74; 80) '(c, d)': (i32, &str)
[75; 76) 'c': i32
[78; 79) 'd': &str
[83; 95) '(1, "hello")': (i32, &str)
[84; 85) '1': i32
[87; 94) '"hello"': &str
[102; 152) 'for (e... }': ()
[106; 112) '(e, f)': ({unknown}, {unknown})
[107; 108) 'e': {unknown}
[110; 111) 'f': {unknown}
[116; 125) 'some_iter': {unknown}
[126; 152) '{ ... }': ()
[140; 141) 'g': {unknown}
[144; 145) 'e': {unknown}
[158; 205) 'if let... }': ()
[165; 170) '[val]': {unknown}
[173; 176) 'opt': {unknown}
[177; 205) '{ ... }': ()
[191; 192) 'h': {unknown}
[195; 198) 'val': {unknown}
[215; 221) 'lambda': |u64, u64, i32| -> i32
[224; 256) '|a: u6...b; c }': |u64, u64, i32| -> i32
[225; 226) 'a': u64
[233; 234) 'b': u64
[236; 237) 'c': i32
[244; 256) '{ a + b; c }': i32
[246; 247) 'a': u64
[246; 251) 'a + b': u64
[250; 251) 'b': u64
[253; 254) 'c': i32
[267; 279) 'ref ref_to_x': &&i32
[282; 283) 'x': &i32
[293; 302) 'mut mut_x': &i32
[305; 306) 'x': &i32
[316; 336) 'ref mu...f_to_x': &mut &i32
[339; 340) 'x': &i32
[350; 351) 'k': &mut &i32
[354; 366) 'mut_ref_to_x': &mut &i32
"###
);
}
#[test]
fn infer_pattern_match_ergonomics() {
assert_snapshot!(
infer(r#"
struct A<T>(T);
fn test() {
let A(n) = &A(1);
let A(n) = &mut A(1);
}
"#),
@r###"
[28; 79) '{ ...(1); }': ()
[38; 42) 'A(n)': A<i32>
[40; 41) 'n': &i32
[45; 50) '&A(1)': &A<i32>
[46; 47) 'A': A<i32>(T) -> A<T>
[46; 50) 'A(1)': A<i32>
[48; 49) '1': i32
[60; 64) 'A(n)': A<i32>
[62; 63) 'n': &mut i32
[67; 76) '&mut A(1)': &mut A<i32>
[72; 73) 'A': A<i32>(T) -> A<T>
[72; 76) 'A(1)': A<i32>
[74; 75) '1': i32
"###
);
}
#[test]
fn infer_pattern_match_ergonomics_ref() {
covers!(match_ergonomics_ref);
assert_snapshot!(
infer(r#"
fn test() {
let v = &(1, &2);
let (_, &w) = v;
}
"#),
@r###"
[11; 57) '{ ...= v; }': ()
[21; 22) 'v': &(i32, &i32)
[25; 33) '&(1, &2)': &(i32, &i32)
[26; 33) '(1, &2)': (i32, &i32)
[27; 28) '1': i32
[30; 32) '&2': &i32
[31; 32) '2': i32
[43; 50) '(_, &w)': (i32, &i32)
[44; 45) '_': i32
[47; 49) '&w': &i32
[48; 49) 'w': i32
[53; 54) 'v': &(i32, &i32)
"###
);
}
#[test]
fn infer_adt_pattern() {
assert_snapshot!(
infer(r#"
enum E {
A { x: usize },
B
}
struct S(u32, E);
fn test() {
let e = E::A { x: 3 };
let S(y, z) = foo;
let E::A { x: new_var } = e;
match e {
E::A { x } => x,
E::B if foo => 1,
E::B => 10,
};
let ref d @ E::A { .. } = e;
d;
}
"#),
@r###"
[68; 289) '{ ... d; }': ()
[78; 79) 'e': E
[82; 95) 'E::A { x: 3 }': E
[92; 93) '3': usize
[106; 113) 'S(y, z)': S
[108; 109) 'y': u32
[111; 112) 'z': E
[116; 119) 'foo': S
[129; 148) 'E::A {..._var }': E
[139; 146) 'new_var': usize
[151; 152) 'e': E
[159; 245) 'match ... }': usize
[165; 166) 'e': E
[177; 187) 'E::A { x }': E
[184; 185) 'x': usize
[191; 192) 'x': usize
[202; 206) 'E::B': E
[210; 213) 'foo': bool
[217; 218) '1': usize
[228; 232) 'E::B': E
[236; 238) '10': usize
[256; 275) 'ref d ...{ .. }': &E
[264; 275) 'E::A { .. }': E
[278; 279) 'e': E
[285; 286) 'd': &E
"###
);
}
#[test]
fn infer_generics_in_patterns() {
assert_snapshot!(
infer(r#"
struct A<T> {
x: T,
}
enum Option<T> {
Some(T),
None,
}
fn test(a1: A<u32>, o: Option<u64>) {
let A { x: x2 } = a1;
let A::<i64> { x: x3 } = A { x: 1 };
match o {
Option::Some(t) => t,
_ => 1,
};
}
"#),
@r###"
[79; 81) 'a1': A<u32>
[91; 92) 'o': Option<u64>
[107; 244) '{ ... }; }': ()
[117; 128) 'A { x: x2 }': A<u32>
[124; 126) 'x2': u32
[131; 133) 'a1': A<u32>
[143; 161) 'A::<i6...: x3 }': A<i64>
[157; 159) 'x3': i64
[164; 174) 'A { x: 1 }': A<i64>
[171; 172) '1': i64
[180; 241) 'match ... }': u64
[186; 187) 'o': Option<u64>
[198; 213) 'Option::Some(t)': Option<u64>
[211; 212) 't': u64
[217; 218) 't': u64
[228; 229) '_': Option<u64>
[233; 234) '1': u64
"###
);
}

View file

@ -0,0 +1,333 @@
use super::infer;
use insta::assert_snapshot;
use test_utils::covers;
#[test]
fn bug_484() {
assert_snapshot!(
infer(r#"
fn test() {
let x = if true {};
}
"#),
@r###"
[11; 37) '{ l... {}; }': ()
[20; 21) 'x': ()
[24; 34) 'if true {}': ()
[27; 31) 'true': bool
[32; 34) '{}': ()
"###
);
}
#[test]
fn no_panic_on_field_of_enum() {
assert_snapshot!(
infer(r#"
enum X {}
fn test(x: X) {
x.some_field;
}
"#),
@r###"
[20; 21) 'x': X
[26; 47) '{ ...eld; }': ()
[32; 33) 'x': X
[32; 44) 'x.some_field': {unknown}
"###
);
}
#[test]
fn bug_585() {
assert_snapshot!(
infer(r#"
fn test() {
X {};
match x {
A::B {} => (),
A::Y() => (),
}
}
"#),
@r###"
[11; 89) '{ ... } }': ()
[17; 21) 'X {}': {unknown}
[27; 87) 'match ... }': ()
[33; 34) 'x': {unknown}
[45; 52) 'A::B {}': {unknown}
[56; 58) '()': ()
[68; 74) 'A::Y()': {unknown}
[78; 80) '()': ()
"###
);
}
#[test]
fn bug_651() {
assert_snapshot!(
infer(r#"
fn quux() {
let y = 92;
1 + y;
}
"#),
@r###"
[11; 41) '{ ...+ y; }': ()
[21; 22) 'y': i32
[25; 27) '92': i32
[33; 34) '1': i32
[33; 38) '1 + y': i32
[37; 38) 'y': i32
"###
);
}
#[test]
fn recursive_vars() {
covers!(type_var_cycles_resolve_completely);
covers!(type_var_cycles_resolve_as_possible);
assert_snapshot!(
infer(r#"
fn test() {
let y = unknown;
[y, &y];
}
"#),
@r###"
[11; 48) '{ ...&y]; }': ()
[21; 22) 'y': &{unknown}
[25; 32) 'unknown': &{unknown}
[38; 45) '[y, &y]': [&&{unknown};_]
[39; 40) 'y': &{unknown}
[42; 44) '&y': &&{unknown}
[43; 44) 'y': &{unknown}
"###
);
}
// Like `recursive_vars`, but with two mutually-referencing variables:
// the array of tuples forces (x, y) to unify with (&y, &x), which must
// terminate with both resolved to nested-reference {unknown} types.
#[test]
fn recursive_vars_2() {
covers!(type_var_cycles_resolve_completely);
covers!(type_var_cycles_resolve_as_possible);
assert_snapshot!(
infer(r#"
fn test() {
let x = unknown;
let y = unknown;
[(x, y), (&y, &x)];
}
"#),
@r###"
[11; 80) '{ ...x)]; }': ()
[21; 22) 'x': &&{unknown}
[25; 32) 'unknown': &&{unknown}
[42; 43) 'y': &&{unknown}
[46; 53) 'unknown': &&{unknown}
[59; 77) '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown});_]
[60; 66) '(x, y)': (&&&{unknown}, &&&{unknown})
[61; 62) 'x': &&{unknown}
[64; 65) 'y': &&{unknown}
[68; 76) '(&y, &x)': (&&&{unknown}, &&&{unknown})
[69; 71) '&y': &&&{unknown}
[70; 71) 'y': &&{unknown}
[73; 75) '&x': &&&{unknown}
[74; 75) 'x': &&{unknown}
"###
);
}
// Matching an unresolved scrutinee against a generic enum variant with a
// `ref mut` binding must not overflow the stack; the binding infers
// `&mut {unknown}` and the scrutinee unifies with Maybe<{unknown}>.
#[test]
fn infer_std_crash_1() {
// caused stack overflow, taken from std
assert_snapshot!(
infer(r#"
enum Maybe<T> {
Real(T),
Fake,
}
fn write() {
match something_unknown {
Maybe::Real(ref mut something) => (),
}
}
"#),
@r###"
[54; 139) '{ ... } }': ()
[60; 137) 'match ... }': ()
[66; 83) 'someth...nknown': Maybe<{unknown}>
[94; 124) 'Maybe:...thing)': Maybe<{unknown}>
[106; 123) 'ref mu...ething': &mut {unknown}
[128; 130) '()': ()
"###
);
}
// Mixing plain integer literals and byte literals in one array must not
// trigger "equating two type variables"; everything unifies to u8. The
// `covers!` mark asserts the int-var resolution path is exercised.
#[test]
fn infer_std_crash_2() {
covers!(type_var_resolves_to_int_var);
// caused "equating two type variables, ...", taken from std
assert_snapshot!(
infer(r#"
fn test_line_buffer() {
&[0, b'\n', 1, b'\n'];
}
"#),
@r###"
[23; 53) '{ ...n']; }': ()
[29; 50) '&[0, b...b'\n']': &[u8;_]
[30; 50) '[0, b'...b'\n']': [u8;_]
[31; 32) '0': u8
[34; 39) 'b'\n'': u8
[41; 42) '1': u8
[44; 49) 'b'\n'': u8
"###
);
}
// A struct pattern on the result of an unresolved macro call must not
// crash; the pattern and its fields all infer {unknown}.
#[test]
fn infer_std_crash_3() {
// taken from rustc
assert_snapshot!(
infer(r#"
pub fn compute() {
match nope!() {
SizeSkeleton::Pointer { non_zero: true, tail } => {}
}
}
"#),
@r###"
[18; 108) '{ ... } }': ()
[24; 106) 'match ... }': ()
[30; 37) 'nope!()': {unknown}
[48; 94) 'SizeSk...tail }': {unknown}
[82; 86) 'true': {unknown}
[88; 92) 'tail': {unknown}
[98; 100) '{}': ()
"###
);
}
// A struct pattern with a nested tuple-struct pattern and a rest pattern
// (`..`) on a dereferenced unknown scrutinee must not crash; all parts
// infer {unknown}.
#[test]
fn infer_std_crash_4() {
// taken from rustc
assert_snapshot!(
infer(r#"
pub fn primitive_type() {
match *self {
BorrowedRef { type_: Primitive(p), ..} => {},
}
}
"#),
@r###"
[25; 106) '{ ... } }': ()
[31; 104) 'match ... }': ()
[37; 42) '*self': {unknown}
[38; 42) 'self': {unknown}
[53; 91) 'Borrow...), ..}': {unknown}
[74; 86) 'Primitive(p)': {unknown}
[84; 85) 'p': {unknown}
[95; 97) '{}': ()
"###
);
}
// Chained if/else unification inside a for loop over an unknown iterable
// must terminate: each `if`/`else` pair unifies its branch types (note the
// snapshot's matching &&{unknown}/&{unknown} branch types) without crashing.
#[test]
fn infer_std_crash_5() {
// taken from rustc
assert_snapshot!(
infer(r#"
fn extra_compiler_flags() {
for content in doesnt_matter {
let name = if doesnt_matter {
first
} else {
&content
};
let content = if ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.contains(&name) {
name
} else {
content
};
}
}
"#),
@r###"
[27; 323) '{ ... } }': ()
[33; 321) 'for co... }': ()
[37; 44) 'content': &{unknown}
[48; 61) 'doesnt_matter': {unknown}
[62; 321) '{ ... }': ()
[76; 80) 'name': &&{unknown}
[83; 167) 'if doe... }': &&{unknown}
[86; 99) 'doesnt_matter': bool
[100; 129) '{ ... }': &&{unknown}
[114; 119) 'first': &&{unknown}
[135; 167) '{ ... }': &&{unknown}
[149; 157) '&content': &&{unknown}
[150; 157) 'content': &{unknown}
[182; 189) 'content': &{unknown}
[192; 314) 'if ICE... }': &{unknown}
[195; 232) 'ICE_RE..._VALUE': {unknown}
[195; 248) 'ICE_RE...&name)': bool
[242; 247) '&name': &&&{unknown}
[243; 247) 'name': &&{unknown}
[249; 277) '{ ... }': &&{unknown}
[263; 267) 'name': &&{unknown}
[283; 314) '{ ... }': &{unknown}
[297; 304) 'content': &{unknown}
"###
);
}
// Field access through nested generic structs
// (Canonical<QueryResponse<R>> -> .value) must substitute type parameters
// correctly instead of crashing; the field infers QueryResponse<R>.
#[test]
fn infer_nested_generics_crash() {
// another crash found typechecking rustc
assert_snapshot!(
infer(r#"
struct Canonical<V> {
value: V,
}
struct QueryResponse<V> {
value: V,
}
fn test<R>(query_response: Canonical<QueryResponse<R>>) {
&query_response.value;
}
"#),
@r###"
[92; 106) 'query_response': Canonical<QueryResponse<R>>
[137; 167) '{ ...lue; }': ()
[143; 164) '&query....value': &QueryResponse<R>
[144; 158) 'query_response': Canonical<QueryResponse<R>>
[144; 164) 'query_....value': QueryResponse<R>
"###
);
}
// Regression test: calling an associated function through a type alias
// (FxHashSet::default, where FxHashSet<T> = HashSet<T, FxHasher>) must not
// crash; the alias's fixed parameter resolves to FxHasher and the elided
// one stays {unknown}, as the snapshot shows.
#[test]
fn bug_1030() {
assert_snapshot!(infer(r#"
struct HashSet<T, H>;
struct FxHasher;
type FxHashSet<T> = HashSet<T, FxHasher>;
impl<T, H> HashSet<T, H> {
fn default() -> HashSet<T, H> {}
}
pub fn main_loop() {
FxHashSet::default();
}
"#),
@r###"
[144; 146) '{}': ()
[169; 198) '{ ...t(); }': ()
[175; 193) 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<T, H>
[175; 195) 'FxHash...ault()': HashSet<{unknown}, FxHasher>
"###
);
}

File diff suppressed because it is too large Load diff

Some files were not shown because too many files have changed in this diff Show more