Merge remote-tracking branch 'upstream/master' into issue961_profiling

Sergey Parilin 2019-04-02 17:55:14 +03:00
commit b74449e995
54 changed files with 1943 additions and 1180 deletions

Cargo.lock (generated), 113 changed lines

@ -76,15 +76,15 @@ dependencies = [
[[package]]
name = "bit-set"
version = "0.5.0"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "bit-vec"
version = "0.5.0"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -118,7 +118,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cargo_metadata"
version = "0.7.3"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -359,11 +359,11 @@ dependencies = [
[[package]]
name = "flexi_logger"
version = "0.11.1"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -443,6 +443,11 @@ name = "glob"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "glob"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "heck"
version = "0.3.1"
@ -503,7 +508,7 @@ dependencies = [
[[package]]
name = "insta"
version = "0.7.1"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -518,7 +523,7 @@ dependencies = [
"serde 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_yaml 0.8.8 (registry+https://github.com/rust-lang/crates.io-index)",
"uuid 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"uuid 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -589,9 +594,6 @@ dependencies = [
name = "lazy_static"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"spin 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "lazycell"
@ -605,7 +607,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "linked-hash-map"
version = "0.5.1"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -855,10 +857,10 @@ dependencies = [
[[package]]
name = "proptest"
version = "0.9.1"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-set 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -911,7 +913,7 @@ dependencies = [
"ra_hir 0.1.0",
"ra_project_model 0.1.0",
"ra_syntax 0.1.0",
"ra_vfs 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_vfs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
]
@ -922,14 +924,13 @@ version = "0.1.0"
dependencies = [
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
"indicatif 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_batch 0.1.0",
"ra_db 0.1.0",
"ra_hir 0.1.0",
"ra_ide_api 0.1.0",
"ra_ide_api_light 0.1.0",
"ra_prof 0.1.0",
"ra_syntax 0.1.0",
"tools 0.1.0",
@ -944,7 +945,7 @@ dependencies = [
"ra_syntax 0.1.0",
"relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
]
@ -962,8 +963,8 @@ version = "0.1.0"
dependencies = [
"arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
"join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -983,19 +984,18 @@ name = "ra_ide_api"
version = "0.1.0"
dependencies = [
"fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"jemalloc-ctl 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"jemallocator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"proptest 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"proptest 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_assists 0.1.0",
"ra_db 0.1.0",
"ra_fmt 0.1.0",
"ra_hir 0.1.0",
"ra_ide_api_light 0.1.0",
"ra_syntax 0.1.0",
"ra_text_edit 0.1.0",
"rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1006,22 +1006,6 @@ dependencies = [
"unicase 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ra_ide_api_light"
version = "0.1.0"
dependencies = [
"insta 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"proptest 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_fmt 0.1.0",
"ra_syntax 0.1.0",
"ra_text_edit 0.1.0",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
]
[[package]]
name = "ra_lsp_server"
version = "0.1.0"
@ -1029,7 +1013,7 @@ dependencies = [
"crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
"gen_lsp_server 0.1.0",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-types 0.56.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1040,7 +1024,7 @@ dependencies = [
"ra_project_model 0.1.0",
"ra_syntax 0.1.0",
"ra_text_edit 0.1.0",
"ra_vfs 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_vfs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1080,7 +1064,7 @@ dependencies = [
name = "ra_project_model"
version = "0.1.0"
dependencies = [
"cargo_metadata 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"cargo_metadata 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_arena 0.1.0",
@ -1115,7 +1099,7 @@ dependencies = [
name = "ra_text_edit"
version = "0.1.0"
dependencies = [
"proptest 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"proptest 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
"text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1129,7 +1113,7 @@ dependencies = [
[[package]]
name = "ra_vfs"
version = "0.2.0"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1361,7 +1345,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "salsa"
version = "0.10.0"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"derive-new 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1370,13 +1354,13 @@ dependencies = [
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa-macros 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa-macros 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "salsa-macros"
version = "0.10.0"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1446,7 +1430,7 @@ version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"dtoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)",
"yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1488,11 +1472,6 @@ dependencies = [
"serde 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "spin"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "stable_deref_trait"
version = "1.1.1"
@ -1786,7 +1765,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "uuid"
version = "0.7.2"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1864,7 +1843,7 @@ name = "yaml-rust"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[metadata]
@ -1877,14 +1856,14 @@ dependencies = [
"checksum backtrace 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "cd5a90e2b463010cd0e0ce9a11d4a9d5d58d9f41d4a6ba3dcaf9e68b466e88b4"
"checksum backtrace-sys 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "797c830ac25ccc92a7f8a7b9862bde440715531514594a6154e3d4a54dd769b6"
"checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e"
"checksum bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"
"checksum bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4440d5cb623bb7390ae27fec0bb6c61111969860f8e3ae198bfa0663645e67cf"
"checksum bit-set 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e84c238982c4b1e1ee668d136c510c67a13465279c0cb367ea6baf6310620a80"
"checksum bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f59bbe95d4e52a6398ec21238d31577f2b28a9d86807f06ca59d191d8440d0bb"
"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
"checksum block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab"
"checksum byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40"
"checksum byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a019b10a2a7cdeb292db131fc8113e57ea2a908f6e7894b0c3c671893b65dbeb"
"checksum cargo_metadata 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bc796c7161c220089dfc7159e13324979181532850a237576b8fb907dd087c0d"
"checksum cargo_metadata 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "178d62b240c34223f265a4c1e275e37d62da163d421fc8d7f7e3ee340f803c57"
"checksum cc 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)" = "c9ce8bb087aacff865633f0bd5aeaed910fe2fe55b55f4739527f2e023a2e53d"
"checksum cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "11d43355396e872eefb45ce6342e4374ed7bc2b3a502d1b28e36d6e23c05d1f4"
"checksum chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878"
@ -1913,7 +1892,7 @@ dependencies = [
"checksum failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1"
"checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
"checksum filetime 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a2df5c1a8c4be27e7707789dc42ae65976e60b394afd293d1419ab915833e646"
"checksum flexi_logger 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4a7878fc9e06c948c6f9cddf571758e0c44786a509e646a094ef13ade3b1aab7"
"checksum flexi_logger 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "005c01dd6942ca46283b7304d14c6d04ec2c87a62f6e62e17c06fb812a574f4a"
"checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674"
"checksum fsevent 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "c4bbbf71584aeed076100b5665ac14e3d85eeb31fdbb45fbd41ef9a682b5ec05"
"checksum fsevent-sys 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1a772d36c338d07a032d5375a36f15f9a7043bf0cb8ce7cee658e037c6032874"
@ -1923,6 +1902,7 @@ dependencies = [
"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
"checksum generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d"
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
"checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
"checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
"checksum humansize 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6cab2627acfc432780848602f3f558f7e9dd427352224b0d9324025796d2a5e"
"checksum idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e"
@ -1930,7 +1910,7 @@ dependencies = [
"checksum indicatif 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2c60da1c9abea75996b70a931bba6c750730399005b61ccd853cee50ef3d0d0c"
"checksum inotify 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "40b54539f3910d6f84fbf9a643efd6e3aa6e4f001426c0329576128255994718"
"checksum inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e74a1aa87c59aeff6ef2cc2fa62d41bc43f54952f55652656b18a02fd5e356c0"
"checksum insta 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be9f00370d23dc7bd32a4d4506b1a14fb922fa39c576c3300fd25ce5b5dab18f"
"checksum insta 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "03e7d88a87d342ce8bd698516151be43e6eb2e84b683db528696cb4a382f734a"
"checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08"
"checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358"
"checksum itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b"
@ -1942,7 +1922,7 @@ dependencies = [
"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14"
"checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"
"checksum libc 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "aab692d7759f5cd8c859e169db98ae5b52c924add2af5fbbca11d12fefb567c1"
"checksum linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "70fb39025bc7cdd76305867c4eccf2f2dcf6e9a57f5b21a93e1c2d86cd03ec9e"
"checksum linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83"
"checksum lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "62ebf1391f6acad60e5c8b43706dde4582df75c06698ab44511d15016bc2442c"
"checksum log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6"
"checksum lsp-types 0.56.0 (registry+https://github.com/rust-lang/crates.io-index)" = "31954f2cf354421e6f99a48fdcfd5c3113c675a0db311960ffdac0b8d45cf09c"
@ -1970,10 +1950,10 @@ dependencies = [
"checksum pest_generator 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "63120576c4efd69615b5537d3d052257328a4ca82876771d6944424ccfd9f646"
"checksum pest_meta 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5a3492a4ed208ffc247adcdcc7ba2a95be3104f58877d0d02f0df39bf3efb5e"
"checksum proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)" = "4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915"
"checksum proptest 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea66c78d75f2c6e9f304269eaef90899798daecc69f1a625d5a3dd793ff3522"
"checksum proptest 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24f5844db2f839e97e3021980975f6ebf8691d9b9b2ca67ed3feb38dc3edb52c"
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
"checksum quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "cdd8e04bd9c52e0342b406469d494fcb033be4bdbe5c606016defbb1681411e1"
"checksum ra_vfs 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1839e4e003d865b58b8b6c231aae6c463dfcd01bfbbddffbdb7662a7b5a627"
"checksum ra_vfs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d75d08da053ec832676686c72dfe509fdd1e807191a50ac79087466ffefccb1c"
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
@ -1999,8 +1979,8 @@ dependencies = [
"checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
"checksum ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "eb9e9b8cde282a9fe6a42dd4681319bfb63f121b8a8ee9439c6f4107e58a46f7"
"checksum salsa 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cad0e2348e2f80725b2980914a08a00267136c3ecf720896d3f7f08eef51e08f"
"checksum salsa-macros 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7c79744109df21b80aef1367669b0a9e4985bc966e76bf0e9321b222ec0b9fbb"
"checksum salsa 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "94f14bbb013866db2fbeceb97ed82cf35f42020edd39d35268f0b11f89399c79"
"checksum salsa-macros 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1ad27cbae189e9739a96e469d37c6a6deafec36f1282d4fdf4681eae67c9dd39"
"checksum same-file 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8f20c4be53a8a1ff4c1f1b2bd14570d2f634628709752f0702ecdd2b3f9a5267"
"checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
@ -2014,7 +1994,6 @@ dependencies = [
"checksum slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b3bc762e6a4b6c6fcaade73e77f9ebc6991b676f88bb2358bddb56560f073373"
"checksum smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c4488ae950c49d403731982257768f48fada354a5203fe81f9bb6f43ca9002be"
"checksum smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9af1035bc5d742ab6b7ab16713e41cc2ffe78cb474f6f43cd696b2d16052007e"
"checksum spin 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "44363f6f51401c34e7be73db0db371c04705d35efbe9f7d6082e03a921a32c55"
"checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f"
@ -2048,7 +2027,7 @@ dependencies = [
"checksum url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a"
"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea"
"checksum utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "796f7e48bef87609f7ade7e06495a87d5cd06c7866e6a5cbfceffc558a243737"
"checksum uuid 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0238db0c5b605dd1cf51de0f21766f97fba2645897024461d6a00c036819a768"
"checksum uuid 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "600ef8213e9f8a0ac1f876e470e90780ae0478eabce7f76aff41b0f4ef0fd5c0"
"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
"checksum walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d9d7ed3431229a144296213105a390676cc49c9b6a72bd19f3176c98e129fa1"


@ -51,6 +51,12 @@ We are on the rust-lang Zulip!
https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frls-2.2E0
## Quick Links
* Work List: https://paper.dropbox.com/doc/RLS-2.0-work-list--AZ3BgHKKCtqszbsi3gi6sjchAQ-42vbnxzuKq2lKwW0mkn8Y
* API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide_api/index.html
* CI: https://travis-ci.org/rust-analyzer/rust-analyzer
## License
Rust analyzer is primarily distributed under the terms of both the MIT


@ -0,0 +1,141 @@
use hir::db::HirDatabase;
use ra_syntax::ast::{AstNode, BinExpr, BinOp};
use crate::{AssistCtx, Assist, AssistId};
/// Flip binary expression assist.
pub(crate) fn flip_binexpr(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let expr = ctx.node_at_offset::<BinExpr>()?;
let lhs = expr.lhs()?.syntax();
let rhs = expr.rhs()?.syntax();
let op_range = expr.op()?.range();
// The assist should be applied only if the cursor is on the operator
let cursor_in_range = ctx.frange.range.is_subrange(&op_range);
if !cursor_in_range {
return None;
}
let action: FlipAction = expr.op_kind()?.into();
// The assist should not be applied for certain operators
if let FlipAction::DontFlip = action {
return None;
}
ctx.add_action(AssistId("flip_binexpr"), "flip binary expression", |edit| {
edit.target(op_range);
if let FlipAction::FlipAndReplaceOp(new_op) = action {
edit.replace(op_range, new_op);
}
edit.replace(lhs.range(), rhs.text());
edit.replace(rhs.range(), lhs.text());
});
ctx.build()
}
enum FlipAction {
// Flip the expression
Flip,
// Flip the expression and replace the operator with this string
FlipAndReplaceOp(&'static str),
// Do not flip the expression
DontFlip,
}
impl From<BinOp> for FlipAction {
fn from(op_kind: BinOp) -> Self {
match op_kind {
BinOp::Assignment => FlipAction::DontFlip,
BinOp::AddAssign => FlipAction::DontFlip,
BinOp::DivAssign => FlipAction::DontFlip,
BinOp::MulAssign => FlipAction::DontFlip,
BinOp::RemAssign => FlipAction::DontFlip,
BinOp::ShrAssign => FlipAction::DontFlip,
BinOp::ShlAssign => FlipAction::DontFlip,
BinOp::SubAssign => FlipAction::DontFlip,
BinOp::BitOrAssign => FlipAction::DontFlip,
BinOp::BitAndAssign => FlipAction::DontFlip,
BinOp::BitXorAssign => FlipAction::DontFlip,
BinOp::GreaterTest => FlipAction::FlipAndReplaceOp("<"),
BinOp::GreaterEqualTest => FlipAction::FlipAndReplaceOp("<="),
BinOp::LesserTest => FlipAction::FlipAndReplaceOp(">"),
BinOp::LesserEqualTest => FlipAction::FlipAndReplaceOp(">="),
_ => FlipAction::Flip,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::helpers::{ check_assist, check_assist_target, check_assist_not_applicable };
#[test]
fn flip_binexpr_target_is_the_op() {
check_assist_target(flip_binexpr, "fn f() { let res = 1 ==<|> 2; }", "==")
}
#[test]
fn flip_binexpr_not_applicable_for_assignment() {
check_assist_not_applicable(flip_binexpr, "fn f() { let mut _x = 1; _x +=<|> 2 }")
}
#[test]
fn flip_binexpr_works_for_eq() {
check_assist(
flip_binexpr,
"fn f() { let res = 1 ==<|> 2; }",
"fn f() { let res = 2 ==<|> 1; }",
)
}
#[test]
fn flip_binexpr_works_for_gt() {
check_assist(
flip_binexpr,
"fn f() { let res = 1 ><|> 2; }",
"fn f() { let res = 2 <<|> 1; }",
)
}
#[test]
fn flip_binexpr_works_for_lteq() {
check_assist(
flip_binexpr,
"fn f() { let res = 1 <=<|> 2; }",
"fn f() { let res = 2 >=<|> 1; }",
)
}
#[test]
fn flip_binexpr_works_for_complex_expr() {
check_assist(
flip_binexpr,
"fn f() { let res = (1 + 1) ==<|> (2 + 2); }",
"fn f() { let res = (2 + 2) ==<|> (1 + 1); }",
)
}
#[test]
fn flip_binexpr_works_inside_match() {
check_assist(
flip_binexpr,
r#"
fn dyn_eq(&self, other: &dyn Diagnostic) -> bool {
match other.downcast_ref::<Self>() {
None => false,
Some(it) => it ==<|> self,
}
}
"#,
r#"
fn dyn_eq(&self, other: &dyn Diagnostic) -> bool {
match other.downcast_ref::<Self>() {
None => false,
Some(it) => self ==<|> it,
}
}
"#,
)
}
}


@ -1,86 +0,0 @@
use hir::db::HirDatabase;
use ra_syntax::ast::{AstNode, BinExpr, BinOp};
use crate::{AssistCtx, Assist, AssistId};
pub(crate) fn flip_eq_operands(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let expr = ctx.node_at_offset::<BinExpr>()?;
let lhs = expr.lhs()?.syntax();
let rhs = expr.rhs()?.syntax();
let op_range = expr.op()?.range();
let cursor_in_range = ctx.frange.range.is_subrange(&op_range);
let allowed_ops = [BinOp::EqualityTest, BinOp::NegatedEqualityTest];
let expr_op = expr.op_kind()?;
if !cursor_in_range || !allowed_ops.iter().any(|o| *o == expr_op) {
return None;
}
ctx.add_action(AssistId("flip_eq_operands"), "flip equality operands", |edit| {
edit.target(op_range);
edit.replace(lhs.range(), rhs.text());
edit.replace(rhs.range(), lhs.text());
});
ctx.build()
}
#[cfg(test)]
mod tests {
use super::*;
use crate::helpers::{check_assist, check_assist_target};
#[test]
fn flip_eq_operands_for_simple_stmt() {
check_assist(
flip_eq_operands,
"fn f() { let res = 1 ==<|> 2; }",
"fn f() { let res = 2 ==<|> 1; }",
)
}
#[test]
fn flip_neq_operands_for_simple_stmt() {
check_assist(
flip_eq_operands,
"fn f() { let res = 1 !=<|> 2; }",
"fn f() { let res = 2 !=<|> 1; }",
)
}
#[test]
fn flip_eq_operands_for_complex_stmt() {
check_assist(
flip_eq_operands,
"fn f() { let res = (1 + 1) ==<|> (2 + 2); }",
"fn f() { let res = (2 + 2) ==<|> (1 + 1); }",
)
}
#[test]
fn flip_eq_operands_in_match_expr() {
check_assist(
flip_eq_operands,
r#"
fn dyn_eq(&self, other: &dyn Diagnostic) -> bool {
match other.downcast_ref::<Self>() {
None => false,
Some(it) => it ==<|> self,
}
}
"#,
r#"
fn dyn_eq(&self, other: &dyn Diagnostic) -> bool {
match other.downcast_ref::<Self>() {
None => false,
Some(it) => self ==<|> it,
}
}
"#,
)
}
#[test]
fn flip_eq_operands_target() {
check_assist_target(flip_eq_operands, "fn f() { let res = 1 ==<|> 2; }", "==")
}
}


@ -0,0 +1,298 @@
use hir::db::HirDatabase;
use hir::source_binder::function_from_child_node;
use ra_syntax::{ast::{self, AstNode}, TextRange};
use ra_syntax::ast::{PatKind, ExprKind};
use crate::{Assist, AssistCtx, AssistId};
use crate::assist_ctx::AssistBuilder;
pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let let_stmt = ctx.node_at_offset::<ast::LetStmt>()?;
let bind_pat = match let_stmt.pat()?.kind() {
PatKind::BindPat(pat) => pat,
_ => return None,
};
if bind_pat.is_mutable() {
return None;
}
let initializer = let_stmt.initializer()?;
let wrap_in_parens = match initializer.kind() {
ExprKind::LambdaExpr(_)
| ExprKind::IfExpr(_)
| ExprKind::LoopExpr(_)
| ExprKind::ForExpr(_)
| ExprKind::WhileExpr(_)
| ExprKind::ContinueExpr(_)
| ExprKind::BreakExpr(_)
| ExprKind::Label(_)
| ExprKind::ReturnExpr(_)
| ExprKind::MatchExpr(_)
| ExprKind::StructLit(_)
| ExprKind::CastExpr(_)
| ExprKind::PrefixExpr(_)
| ExprKind::RangeExpr(_)
| ExprKind::BinExpr(_) => true,
ExprKind::CallExpr(_)
| ExprKind::IndexExpr(_)
| ExprKind::MethodCallExpr(_)
| ExprKind::FieldExpr(_)
| ExprKind::TryExpr(_)
| ExprKind::RefExpr(_)
| ExprKind::Literal(_)
| ExprKind::TupleExpr(_)
| ExprKind::ArrayExpr(_)
| ExprKind::ParenExpr(_)
| ExprKind::PathExpr(_)
| ExprKind::BlockExpr(_) => false,
};
let delete_range = if let Some(whitespace) =
let_stmt.syntax().next_sibling().and_then(ast::Whitespace::cast)
{
TextRange::from_to(let_stmt.syntax().range().start(), whitespace.syntax().range().end())
} else {
let_stmt.syntax().range()
};
let init_str = if wrap_in_parens {
format!("({})", initializer.syntax().text().to_string())
} else {
initializer.syntax().text().to_string()
};
let function = function_from_child_node(ctx.db, ctx.frange.file_id, bind_pat.syntax())?;
let scope = function.scopes(ctx.db);
let refs = scope.find_all_refs(bind_pat);
ctx.add_action(
AssistId("inline_local_variable"),
"inline local variable",
move |edit: &mut AssistBuilder| {
edit.delete(delete_range);
for desc in refs {
edit.replace(desc.range, init_str.clone())
}
edit.set_cursor(delete_range.start())
},
);
ctx.build()
}
#[cfg(test)]
mod tests {
use crate::helpers::{check_assist, check_assist_not_applicable};
use super::*;
#[test]
fn test_inline_let_bind_literal_expr() {
check_assist(
inline_local_varialbe,
"
fn bar(a: usize) {}
fn foo() {
let a<|> = 1;
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
"
fn bar(a: usize) {}
fn foo() {
<|>1 + 1;
if 1 > 10 {
}
while 1 > 10 {
}
let b = 1 * 10;
bar(1);
}",
);
}
#[test]
fn test_inline_let_bind_bin_expr() {
check_assist(
inline_local_varialbe,
"
fn bar(a: usize) {}
fn foo() {
let a<|> = 1 + 1;
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
"
fn bar(a: usize) {}
fn foo() {
<|>(1 + 1) + 1;
if (1 + 1) > 10 {
}
while (1 + 1) > 10 {
}
let b = (1 + 1) * 10;
bar((1 + 1));
}",
);
}
#[test]
fn test_inline_let_bind_function_call_expr() {
check_assist(
inline_local_varialbe,
"
fn bar(a: usize) {}
fn foo() {
let a<|> = bar(1);
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
"
fn bar(a: usize) {}
fn foo() {
<|>bar(1) + 1;
if bar(1) > 10 {
}
while bar(1) > 10 {
}
let b = bar(1) * 10;
bar(bar(1));
}",
);
}
#[test]
fn test_inline_let_bind_cast_expr() {
check_assist(
inline_local_varialbe,
"
fn bar(a: usize): usize { a }
fn foo() {
let a<|> = bar(1) as u64;
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
"
fn bar(a: usize): usize { a }
fn foo() {
<|>(bar(1) as u64) + 1;
if (bar(1) as u64) > 10 {
}
while (bar(1) as u64) > 10 {
}
let b = (bar(1) as u64) * 10;
bar((bar(1) as u64));
}",
);
}
#[test]
fn test_inline_let_bind_block_expr() {
check_assist(
inline_local_varialbe,
"
fn foo() {
let a<|> = { 10 + 1 };
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
"
fn foo() {
<|>{ 10 + 1 } + 1;
if { 10 + 1 } > 10 {
}
while { 10 + 1 } > 10 {
}
let b = { 10 + 1 } * 10;
bar({ 10 + 1 });
}",
);
}
#[test]
fn test_inline_let_bind_paren_expr() {
check_assist(
inline_local_varialbe,
"
fn foo() {
let a<|> = ( 10 + 1 );
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
"
fn foo() {
<|>( 10 + 1 ) + 1;
if ( 10 + 1 ) > 10 {
}
while ( 10 + 1 ) > 10 {
}
let b = ( 10 + 1 ) * 10;
bar(( 10 + 1 ));
}",
);
}
#[test]
fn test_not_inline_mut_variable() {
check_assist_not_applicable(
inline_local_varialbe,
"
fn foo() {
let mut a<|> = 1 + 1;
a + 1;
}",
);
}
}
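
The `wrap_in_parens` match near the top of this file is what keeps an inlined initializer from changing operator precedence at its use sites. A standalone illustration of the hazard it guards against, in plain Rust and independent of the assist code (the numbers mirror the tests above):

fn main() {
    // As written, the let binding groups `1 + 1` before the multiplication.
    let a = 1 + 1;
    assert_eq!(a * 10, 20);

    // Naive textual inlining without parentheses changes the parse:
    // `*` binds tighter than `+`, so this evaluates to 11, not 20.
    assert_eq!(1 + 1 * 10, 11);

    // Wrapping the inlined expression, as the assist does for binary
    // expressions and other low-precedence forms, preserves the grouping.
    assert_eq!((1 + 1) * 10, 20);
}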


@ -88,11 +88,12 @@ where
mod add_derive;
mod add_impl;
mod flip_comma;
mod flip_eq_operands;
mod flip_binexpr;
mod change_visibility;
mod fill_match_arms;
mod fill_struct_fields;
mod introduce_variable;
mod inline_local_variable;
mod replace_if_let_with_match;
mod split_import;
mod remove_dbg;
@ -107,7 +108,7 @@ fn all_assists<DB: HirDatabase>() -> &'static [fn(AssistCtx<DB>) -> Option<Assis
fill_match_arms::fill_match_arms,
fill_struct_fields::fill_struct_fields,
flip_comma::flip_comma,
flip_eq_operands::flip_eq_operands,
flip_binexpr::flip_binexpr,
introduce_variable::introduce_variable,
replace_if_let_with_match::replace_if_let_with_match,
split_import::split_import,
@ -115,6 +116,7 @@ fn all_assists<DB: HirDatabase>() -> &'static [fn(AssistCtx<DB>) -> Option<Assis
auto_import::auto_import,
add_missing_impl_members::add_missing_impl_members,
add_missing_impl_members::add_missing_default_members,
inline_local_variable::inline_local_varialbe,
]
}


@ -14,7 +14,6 @@ indicatif = "0.11.0"
ra_syntax = { path = "../ra_syntax" }
ra_ide_api = { path = "../ra_ide_api" }
ra_ide_api_light = { path = "../ra_ide_api_light" }
tools = { path = "../tools" }
ra_batch = { path = "../ra_batch" }
ra_hir = { path = "../ra_hir" }


@ -5,7 +5,7 @@ use std::{fs, io::Read, path::Path};
use clap::{App, Arg, SubCommand};
use join_to_string::join;
use ra_ide_api::{Analysis, FileRange};
use ra_ide_api_light::file_structure;
use ra_ide_api::file_structure;
use ra_syntax::{SourceFile, TextRange, TreeArc, AstNode};
use tools::collect_tests;
use flexi_logger::Logger;


@ -5,7 +5,7 @@ version = "0.1.0"
authors = ["rust-analyzer developers"]
[dependencies]
salsa = "0.10.0"
salsa = "0.11.1"
relative-path = "0.4.0"
rustc-hash = "1.0"
parking_lot = "0.7.0"


@ -1,8 +1,7 @@
use std::sync::Arc;
use relative_path::RelativePathBuf;
use ra_db::{CrateId, SourceRootId, Edition};
use ra_syntax::{ast::self, TreeArc, SyntaxNode};
use ra_syntax::{ast::self, TreeArc};
use crate::{
Name, ScopesWithSourceMap, Ty, HirFileId,
@ -14,9 +13,11 @@ use crate::{
adt::{EnumVariantId, StructFieldId, VariantDef},
generics::GenericParams,
docs::{Documentation, Docs, docs_from_ast},
ids::{FunctionId, StructId, EnumId, AstItemDef, ConstId, StaticId, TraitId, TypeId},
ids::{FunctionId, StructId, EnumId, AstItemDef, ConstId, StaticId, TraitId, TypeAliasId},
impl_block::ImplBlock,
resolve::Resolver,
diagnostics::DiagnosticSink,
traits::{TraitItem, TraitData},
};
/// hir::Crate describes a single crate. It's the main interface with which
@ -95,11 +96,6 @@ pub enum ModuleSource {
Module(TreeArc<ast::Module>),
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum Problem {
UnresolvedModule { candidate: RelativePathBuf },
}
impl Module {
/// Name of this module.
pub fn name(&self, db: &impl HirDatabase) -> Option<Name> {
@ -171,8 +167,24 @@ impl Module {
db.crate_def_map(self.krate)[self.module_id].scope.clone()
}
pub fn problems(&self, db: &impl HirDatabase) -> Vec<(TreeArc<SyntaxNode>, Problem)> {
self.problems_impl(db)
pub fn diagnostics(&self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
db.crate_def_map(self.krate).add_diagnostics(db, self.module_id, sink);
for decl in self.declarations(db) {
match decl {
crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
crate::ModuleDef::Module(f) => f.diagnostics(db, sink),
_ => (),
}
}
for impl_block in self.impl_blocks(db) {
for item in impl_block.items(db) {
match item {
crate::ImplItem::Method(f) => f.diagnostics(db, sink),
_ => (),
}
}
}
}
pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
@ -519,6 +531,10 @@ impl Function {
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r
}
pub fn diagnostics(&self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
self.infer(db).add_diagnostics(db, *self, sink);
}
}
impl Docs for Function {
@ -634,6 +650,18 @@ impl Trait {
pub fn generic_params(&self, db: &impl DefDatabase) -> Arc<GenericParams> {
db.generic_params((*self).into())
}
pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
self.trait_data(db).name().clone()
}
pub fn items(self, db: &impl DefDatabase) -> Vec<TraitItem> {
self.trait_data(db).items().to_vec()
}
pub(crate) fn trait_data(self, db: &impl DefDatabase) -> Arc<TraitData> {
db.trait_data(self)
}
}
impl Docs for Trait {
@ -644,7 +672,7 @@ impl Docs for Trait {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeAlias {
pub(crate) id: TypeId,
pub(crate) id: TypeAliasId,
}
impl TypeAlias {


@ -1,18 +1,18 @@
use ra_db::FileId;
use ra_syntax::{ast, SyntaxNode, TreeArc, AstNode};
use ra_syntax::{ast, TreeArc};
use crate::{
Module, ModuleSource, Problem, Name,
Module, ModuleSource, Name, AstId,
nameres::{CrateModuleId, ImportId},
HirDatabase, DefDatabase,
HirFileId, SourceItemId,
HirFileId,
};
impl ModuleSource {
pub(crate) fn new(
db: &impl DefDatabase,
file_id: Option<FileId>,
decl_id: Option<SourceItemId>,
decl_id: Option<AstId<ast::Module>>,
) -> ModuleSource {
match (file_id, decl_id) {
(Some(file_id), _) => {
@ -20,8 +20,7 @@ impl ModuleSource {
ModuleSource::SourceFile(source_file)
}
(None, Some(item_id)) => {
let module = db.file_item(item_id);
let module = ast::Module::cast(&*module).unwrap();
let module = item_id.to_node(db);
assert!(module.item_list().is_some(), "expected inline module");
ModuleSource::Module(module.to_owned())
}
@ -55,7 +54,7 @@ impl Module {
let decl_id = def_map[self.module_id].declaration;
let file_id = def_map[self.module_id].definition;
let module_source = ModuleSource::new(db, file_id, decl_id);
let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id);
let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id());
(file_id, module_source)
}
@ -65,9 +64,8 @@ impl Module {
) -> Option<(HirFileId, TreeArc<ast::Module>)> {
let def_map = db.crate_def_map(self.krate);
let decl = def_map[self.module_id].declaration?;
let syntax_node = db.file_item(decl);
let ast = ast::Module::cast(&syntax_node).unwrap().to_owned();
Some((decl.file_id, ast))
let ast = decl.to_node(db);
Some((decl.file_id(), ast))
}
pub(crate) fn import_source_impl(
@ -76,7 +74,7 @@ impl Module {
import: ImportId,
) -> TreeArc<ast::PathSegment> {
let (file_id, source) = self.definition_source(db);
let (_, source_map) = db.raw_items_with_source_map(file_id.original_file(db));
let (_, source_map) = db.raw_items_with_source_map(file_id);
source_map.get(&source, import)
}
@ -108,19 +106,4 @@ impl Module {
let parent_id = def_map[self.module_id].parent?;
Some(self.with_module_id(parent_id))
}
pub(crate) fn problems_impl(
&self,
db: &impl HirDatabase,
) -> Vec<(TreeArc<SyntaxNode>, Problem)> {
let def_map = db.crate_def_map(self.krate);
let (my_file_id, _) = self.definition_source(db);
// FIXME: not entirely corret filterint by module
def_map
.problems()
.iter()
.filter(|(source_item_id, _problem)| my_file_id == source_item_id.file_id)
.map(|(source_item_id, problem)| (db.file_item(*source_item_id), problem.clone()))
.collect()
}
}


@ -1,10 +1,10 @@
use std::sync::Arc;
use ra_syntax::{SyntaxNode, TreeArc, SourceFile};
use ra_db::{SourceDatabase, salsa, FileId};
use ra_db::{SourceDatabase, salsa};
use crate::{
HirFileId, SourceFileItems, SourceItemId, Crate, Module, HirInterner,
HirFileId, MacroDefId, AstIdMap, ErasedFileAstId, Crate, Module, HirInterner,
Function, FnSignature, ExprScopes, TypeAlias,
Struct, Enum, StructField,
Const, ConstSignature, Static,
@ -14,11 +14,15 @@ use crate::{
impl_block::{ModuleImplBlocks, ImplSourceMap},
generics::{GenericParams, GenericDef},
type_ref::TypeRef,
traits::TraitData, Trait, ty::TraitRef
};
#[salsa::query_group(DefDatabaseStorage)]
pub trait DefDatabase: SourceDatabase + AsRef<HirInterner> {
#[salsa::invoke(HirFileId::hir_parse)]
#[salsa::invoke(crate::ids::macro_def_query)]
fn macro_def(&self, macro_id: MacroDefId) -> Option<Arc<mbe::MacroRules>>;
#[salsa::invoke(HirFileId::hir_parse_query)]
fn hir_parse(&self, file_id: HirFileId) -> TreeArc<SourceFile>;
#[salsa::invoke(crate::adt::StructData::struct_data_query)]
@ -27,17 +31,23 @@ pub trait DefDatabase: SourceDatabase + AsRef<HirInterner> {
#[salsa::invoke(crate::adt::EnumData::enum_data_query)]
fn enum_data(&self, e: Enum) -> Arc<EnumData>;
#[salsa::invoke(crate::ids::SourceFileItems::file_items_query)]
fn file_items(&self, file_id: HirFileId) -> Arc<SourceFileItems>;
#[salsa::invoke(crate::traits::TraitData::trait_data_query)]
fn trait_data(&self, t: Trait) -> Arc<TraitData>;
#[salsa::invoke(crate::ids::SourceFileItems::file_item_query)]
fn file_item(&self, source_item_id: SourceItemId) -> TreeArc<SyntaxNode>;
#[salsa::invoke(crate::source_id::AstIdMap::ast_id_map_query)]
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
#[salsa::invoke(crate::source_id::AstIdMap::file_item_query)]
fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> TreeArc<SyntaxNode>;
#[salsa::invoke(RawItems::raw_items_query)]
fn raw_items(&self, file_id: FileId) -> Arc<RawItems>;
fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>;
#[salsa::invoke(RawItems::raw_items_with_source_map_query)]
fn raw_items_with_source_map(&self, file_id: FileId) -> (Arc<RawItems>, Arc<ImportSourceMap>);
fn raw_items_with_source_map(
&self,
file_id: HirFileId,
) -> (Arc<RawItems>, Arc<ImportSourceMap>);
#[salsa::invoke(CrateDefMap::crate_def_map_query)]
fn crate_def_map(&self, krate: Crate) -> Arc<CrateDefMap>;
@ -98,6 +108,9 @@ pub trait HirDatabase: DefDatabase {
#[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)]
fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>;
#[salsa::invoke(crate::ty::method_resolution::implements)]
fn implements(&self, trait_ref: TraitRef) -> bool;
}
#[test]


@ -0,0 +1,115 @@
use std::{fmt, any::Any};
use ra_syntax::{SyntaxNodePtr, TreeArc, AstPtr, TextRange, ast, SyntaxNode};
use relative_path::RelativePathBuf;
use crate::{HirFileId, HirDatabase};
/// Diagnostic defines hir API for errors and warnings.
///
/// It is used as a `dyn` object, which you can downcast to a concrete
/// diagnostic. Diagnostics are structured, meaning that they include rich
/// information which can be used by the IDE to create fixes. Diagnostics are
/// expressed in terms of macro-expanded syntax tree nodes (so it's a bad idea
/// to store them in a salsa value).
///
/// Internally, various subsystems of hir produce diagnostics specific to a
/// subsystem (typically, an `enum`), which are safe to store in salsa but do not
/// include source locations. Such internal diagnostics are transformed into an
/// instance of `Diagnostic` on demand.
pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
fn message(&self) -> String;
fn file(&self) -> HirFileId;
fn syntax_node_ptr(&self) -> SyntaxNodePtr;
fn highlight_range(&self) -> TextRange {
self.syntax_node_ptr().range()
}
fn as_any(&self) -> &(dyn Any + Send + 'static);
}
impl dyn Diagnostic {
pub fn syntax_node(&self, db: &impl HirDatabase) -> TreeArc<SyntaxNode> {
let source_file = db.hir_parse(self.file());
self.syntax_node_ptr().to_node(&source_file).to_owned()
}
pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> {
self.as_any().downcast_ref()
}
}
pub struct DiagnosticSink<'a> {
callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
default_callback: Box<dyn FnMut(&dyn Diagnostic) + 'a>,
}
impl<'a> DiagnosticSink<'a> {
pub fn new(cb: impl FnMut(&dyn Diagnostic) + 'a) -> DiagnosticSink<'a> {
DiagnosticSink { callbacks: Vec::new(), default_callback: Box::new(cb) }
}
pub fn on<D: Diagnostic, F: FnMut(&D) + 'a>(mut self, mut cb: F) -> DiagnosticSink<'a> {
let cb = move |diag: &dyn Diagnostic| match diag.downcast_ref::<D>() {
Some(d) => {
cb(d);
Ok(())
}
None => Err(()),
};
self.callbacks.push(Box::new(cb));
self
}
pub(crate) fn push(&mut self, d: impl Diagnostic) {
let d: &dyn Diagnostic = &d;
for cb in self.callbacks.iter_mut() {
match cb(d) {
Ok(()) => return,
Err(()) => (),
}
}
(self.default_callback)(d)
}
}
#[derive(Debug)]
pub struct NoSuchField {
pub file: HirFileId,
pub field: AstPtr<ast::NamedField>,
}
impl Diagnostic for NoSuchField {
fn message(&self) -> String {
"no such field".to_string()
}
fn file(&self) -> HirFileId {
self.file
}
fn syntax_node_ptr(&self) -> SyntaxNodePtr {
self.field.into()
}
fn as_any(&self) -> &(Any + Send + 'static) {
self
}
}
#[derive(Debug)]
pub struct UnresolvedModule {
pub file: HirFileId,
pub decl: AstPtr<ast::Module>,
pub candidate: RelativePathBuf,
}
impl Diagnostic for UnresolvedModule {
fn message(&self) -> String {
"unresolved module".to_string()
}
fn file(&self) -> HirFileId {
self.file
}
fn syntax_node_ptr(&self) -> SyntaxNodePtr {
self.decl.into()
}
fn as_any(&self) -> &(Any + Send + 'static) {
self
}
}
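
The doc comment at the top of this file describes the intended consumption pattern: build a `DiagnosticSink` with a default callback, register typed handlers via `on`, and pass the sink to `Module::diagnostics` (added in the code-model change earlier in this commit). A minimal sketch of that pattern follows; the free function, its name, and the exact import paths are assumptions for illustration, not part of this commit:

use std::cell::RefCell;
// Assumed re-export paths; inside ra_hir itself these would be `crate::...`.
use hir::{db::HirDatabase, Module};
use hir::diagnostics::{DiagnosticSink, UnresolvedModule};

fn collect_module_diagnostics(db: &impl HirDatabase, module: Module) -> Vec<String> {
    // A RefCell lets both callbacks push into the same buffer.
    let messages = RefCell::new(Vec::new());
    let mut sink = DiagnosticSink::new(|d| {
        // Default callback: diagnostics without a typed handler land here.
        messages.borrow_mut().push(d.message())
    })
    .on::<UnresolvedModule, _>(|d| {
        // Typed handler: the downcast succeeded, so concrete fields are usable.
        messages.borrow_mut().push(format!("unresolved module, candidate {:?}", d.candidate))
    });
    module.diagnostics(db, &mut sink);
    drop(sink); // release the borrows held by the callbacks
    messages.into_inner()
}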


@ -5,7 +5,7 @@ use rustc_hash::FxHashMap;
use ra_arena::{Arena, RawId, impl_arena_id, map::ArenaMap};
use ra_syntax::{
SyntaxNodePtr, AstNode,
SyntaxNodePtr, AstPtr, AstNode,
ast::{self, LoopBodyOwner, ArgListOwner, NameOwner, LiteralFlavor, TypeAscriptionOwner}
};
@ -54,6 +54,7 @@ pub struct BodySourceMap {
expr_map_back: ArenaMap<ExprId, SyntaxNodePtr>,
pat_map: FxHashMap<SyntaxNodePtr, PatId>,
pat_map_back: ArenaMap<PatId, SyntaxNodePtr>,
field_map: FxHashMap<(ExprId, usize), AstPtr<ast::NamedField>>,
}
impl Body {
@ -138,6 +139,10 @@ impl BodySourceMap {
pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
self.pat_map.get(&SyntaxNodePtr::new(node.syntax())).cloned()
}
pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::NamedField> {
self.field_map[&(expr, field)].clone()
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
@ -629,8 +634,10 @@ impl ExprCollector {
}
ast::ExprKind::StructLit(e) => {
let path = e.path().and_then(Path::from_ast);
let mut field_ptrs = Vec::new();
let fields = if let Some(nfl) = e.named_field_list() {
nfl.fields()
.inspect(|field| field_ptrs.push(AstPtr::new(*field)))
.map(|field| StructLitField {
name: field
.name_ref()
@ -657,7 +664,11 @@ impl ExprCollector {
Vec::new()
};
let spread = e.spread().map(|s| self.collect_expr(s));
self.alloc_expr(Expr::StructLit { path, fields, spread }, syntax_ptr)
let res = self.alloc_expr(Expr::StructLit { path, fields, spread }, syntax_ptr);
for (i, ptr) in field_ptrs.into_iter().enumerate() {
self.source_map.field_map.insert((res, i), ptr);
}
res
}
ast::ExprKind::FieldExpr(e) => {
let expr = self.collect_expr_opt(e.expr());


@ -1,16 +1,15 @@
use std::{
marker::PhantomData,
hash::{Hash, Hasher},
sync::Arc,
};
use ra_db::{LocationInterner, FileId};
use ra_syntax::{TreeArc, SyntaxNode, SourceFile, AstNode, SyntaxNodePtr, ast};
use ra_arena::{Arena, RawId, ArenaId, impl_arena_id};
use ra_syntax::{TreeArc, SourceFile, AstNode, ast};
use ra_arena::{RawId, ArenaId, impl_arena_id};
use mbe::MacroRules;
use crate::{
Module,
DefDatabase,
Module, DefDatabase, AstId, FileAstId,
};
#[derive(Debug, Default)]
@ -22,7 +21,7 @@ pub struct HirInterner {
consts: LocationInterner<ItemLoc<ast::ConstDef>, ConstId>,
statics: LocationInterner<ItemLoc<ast::StaticDef>, StaticId>,
traits: LocationInterner<ItemLoc<ast::TraitDef>, TraitId>,
types: LocationInterner<ItemLoc<ast::TypeAliasDef>, TypeId>,
types: LocationInterner<ItemLoc<ast::TypeAliasDef>, TypeAliasId>,
}
impl HirInterner {
@ -68,7 +67,7 @@ impl HirFileId {
HirFileIdRepr::File(file_id) => file_id,
HirFileIdRepr::Macro(macro_call_id) => {
let loc = macro_call_id.loc(db);
loc.source_item_id.file_id.original_file(db)
loc.ast_id.file_id().original_file(db)
}
}
}
@ -83,7 +82,10 @@ impl HirFileId {
}
}
pub(crate) fn hir_parse(db: &impl DefDatabase, file_id: HirFileId) -> TreeArc<SourceFile> {
pub(crate) fn hir_parse_query(
db: &impl DefDatabase,
file_id: HirFileId,
) -> TreeArc<SourceFile> {
match file_id.0 {
HirFileIdRepr::File(file_id) => db.parse(file_id),
HirFileIdRepr::Macro(macro_call_id) => {
@ -96,14 +98,10 @@ impl HirFileId {
fn parse_macro(db: &impl DefDatabase, macro_call_id: MacroCallId) -> Option<TreeArc<SourceFile>> {
let loc = macro_call_id.loc(db);
let syntax = db.file_item(loc.source_item_id);
let macro_call = ast::MacroCall::cast(&syntax).unwrap();
let macro_call = loc.ast_id.to_node(db);
let (macro_arg, _) = macro_call.token_tree().and_then(mbe::ast_to_token_tree)?;
let def_map = db.crate_def_map(loc.module.krate);
let (krate, macro_id) = def_map.resolve_macro(macro_call_id)?;
let def_map = db.crate_def_map(krate);
let macro_rules = &def_map[macro_id];
let macro_rules = db.macro_def(loc.def)?;
let tt = macro_rules.expand(&macro_arg).ok()?;
Some(mbe::token_tree_to_ast_item_list(&tt))
}
@ -126,6 +124,17 @@ impl From<MacroCallId> for HirFileId {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId(pub(crate) AstId<ast::MacroCall>);
pub(crate) fn macro_def_query(db: &impl DefDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
let macro_call = id.0.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(arg)?;
let rules = MacroRules::parse(&tt).ok()?;
Some(Arc::new(rules))
}
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -134,8 +143,8 @@ impl_arena_id!(MacroCallId);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
pub(crate) module: Module,
pub(crate) source_item_id: SourceItemId,
pub(crate) def: MacroDefId,
pub(crate) ast_id: AstId<ast::MacroCall>,
}
impl MacroCallId {
@ -145,7 +154,6 @@ impl MacroCallId {
}
impl MacroCallLoc {
#[allow(unused)]
pub(crate) fn id(&self, db: &impl AsRef<HirInterner>) -> MacroCallId {
db.as_ref().macros.loc2id(&self)
}
@ -154,26 +162,25 @@ impl MacroCallLoc {
#[derive(Debug)]
pub struct ItemLoc<N: AstNode> {
pub(crate) module: Module,
raw: SourceItemId,
_ty: PhantomData<N>,
ast_id: AstId<N>,
}
impl<N: AstNode> PartialEq for ItemLoc<N> {
fn eq(&self, other: &Self) -> bool {
self.module == other.module && self.raw == other.raw
self.module == other.module && self.ast_id == other.ast_id
}
}
impl<N: AstNode> Eq for ItemLoc<N> {}
impl<N: AstNode> Hash for ItemLoc<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.module.hash(hasher);
self.raw.hash(hasher);
self.ast_id.hash(hasher);
}
}
impl<N: AstNode> Clone for ItemLoc<N> {
fn clone(&self) -> ItemLoc<N> {
ItemLoc { module: self.module, raw: self.raw, _ty: PhantomData }
ItemLoc { module: self.module, ast_id: self.ast_id }
}
}
@ -200,26 +207,19 @@ impl<'a, DB: DefDatabase> LocationCtx<&'a DB> {
pub(crate) trait AstItemDef<N: AstNode>: ArenaId + Clone {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<N>, Self>;
fn from_ast(ctx: LocationCtx<&impl DefDatabase>, ast: &N) -> Self {
let items = ctx.db.file_items(ctx.file_id);
let item_id = items.id_of(ctx.file_id, ast.syntax());
Self::from_source_item_id_unchecked(ctx, item_id)
let items = ctx.db.ast_id_map(ctx.file_id);
let item_id = items.ast_id(ast);
Self::from_ast_id(ctx, item_id)
}
fn from_source_item_id_unchecked(
ctx: LocationCtx<&impl DefDatabase>,
item_id: SourceFileItemId,
) -> Self {
let raw = SourceItemId { file_id: ctx.file_id, item_id };
let loc = ItemLoc { module: ctx.module, raw, _ty: PhantomData };
fn from_ast_id(ctx: LocationCtx<&impl DefDatabase>, ast_id: FileAstId<N>) -> Self {
let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) };
Self::interner(ctx.db.as_ref()).loc2id(&loc)
}
fn source(self, db: &impl DefDatabase) -> (HirFileId, TreeArc<N>) {
let int = Self::interner(db.as_ref());
let loc = int.id2loc(self);
let syntax = db.file_item(loc.raw);
let ast =
N::cast(&syntax).unwrap_or_else(|| panic!("invalid ItemLoc: {:?}", loc.raw)).to_owned();
(loc.raw.file_id, ast)
let ast = loc.ast_id.to_node(db);
(loc.ast_id.file_id(), ast)
}
fn module(self, db: &impl DefDatabase) -> Module {
let int = Self::interner(db.as_ref());
@ -229,7 +229,7 @@ pub(crate) trait AstItemDef<N: AstNode>: ArenaId + Clone {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct FunctionId(RawId);
pub(crate) struct FunctionId(RawId);
impl_arena_id!(FunctionId);
impl AstItemDef<ast::FnDef> for FunctionId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::FnDef>, Self> {
@ -238,7 +238,7 @@ impl AstItemDef<ast::FnDef> for FunctionId {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructId(RawId);
pub(crate) struct StructId(RawId);
impl_arena_id!(StructId);
impl AstItemDef<ast::StructDef> for StructId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::StructDef>, Self> {
@ -247,7 +247,7 @@ impl AstItemDef<ast::StructDef> for StructId {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumId(RawId);
pub(crate) struct EnumId(RawId);
impl_arena_id!(EnumId);
impl AstItemDef<ast::EnumDef> for EnumId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::EnumDef>, Self> {
@ -256,7 +256,7 @@ impl AstItemDef<ast::EnumDef> for EnumId {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ConstId(RawId);
pub(crate) struct ConstId(RawId);
impl_arena_id!(ConstId);
impl AstItemDef<ast::ConstDef> for ConstId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::ConstDef>, Self> {
@ -265,7 +265,7 @@ impl AstItemDef<ast::ConstDef> for ConstId {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticId(RawId);
pub(crate) struct StaticId(RawId);
impl_arena_id!(StaticId);
impl AstItemDef<ast::StaticDef> for StaticId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::StaticDef>, Self> {
@ -274,7 +274,7 @@ impl AstItemDef<ast::StaticDef> for StaticId {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TraitId(RawId);
pub(crate) struct TraitId(RawId);
impl_arena_id!(TraitId);
impl AstItemDef<ast::TraitDef> for TraitId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::TraitDef>, Self> {
@ -283,117 +283,10 @@ impl AstItemDef<ast::TraitDef> for TraitId {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeId(RawId);
impl_arena_id!(TypeId);
impl AstItemDef<ast::TypeAliasDef> for TypeId {
pub(crate) struct TypeAliasId(RawId);
impl_arena_id!(TypeAliasId);
impl AstItemDef<ast::TypeAliasDef> for TypeAliasId {
fn interner(interner: &HirInterner) -> &LocationInterner<ItemLoc<ast::TypeAliasDef>, Self> {
&interner.types
}
}
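The `AstItemDef` trait above relies on a location interner: an `ItemLoc` (a module plus an `AstId`) is exchanged for a small, stable id via `loc2id`, and `id2loc` maps it back when the definition's source is needed. A minimal standalone sketch of that interning pattern, using simplified stand-in types rather than the crate's `LocationInterner`:

use std::collections::HashMap;

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct Loc {
    module: u32,
    ast_id: u32,
}

#[derive(Default)]
struct Interner {
    map: HashMap<Loc, u32>,
    data: Vec<Loc>,
}

impl Interner {
    // First sighting allocates a fresh id; equal locations always get the same id.
    fn loc2id(&mut self, loc: &Loc) -> u32 {
        if let Some(&id) = self.map.get(loc) {
            return id;
        }
        let id = self.data.len() as u32;
        self.data.push(loc.clone());
        self.map.insert(loc.clone(), id);
        id
    }
    fn id2loc(&self, id: u32) -> &Loc {
        &self.data[id as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let loc = Loc { module: 0, ast_id: 1 };
    let a = interner.loc2id(&loc);
    let b = interner.loc2id(&loc);
    assert_eq!(a, b); // same location, same id
    assert_eq!(interner.id2loc(a), &loc);
}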
/// Identifier of item within a specific file. This is stable over reparses, so
/// it's OK to use it as a salsa key/value.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SourceFileItemId(RawId);
impl_arena_id!(SourceFileItemId);
impl SourceFileItemId {
pub(crate) fn with_file_id(self, file_id: HirFileId) -> SourceItemId {
SourceItemId { file_id, item_id: self }
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SourceItemId {
pub(crate) file_id: HirFileId,
pub(crate) item_id: SourceFileItemId,
}
/// Maps items' `SyntaxNode`s to `SourceFileItemId`s and back.
#[derive(Debug, PartialEq, Eq)]
pub struct SourceFileItems {
file_id: HirFileId,
arena: Arena<SourceFileItemId, SyntaxNodePtr>,
}
impl SourceFileItems {
pub(crate) fn file_items_query(
db: &impl DefDatabase,
file_id: HirFileId,
) -> Arc<SourceFileItems> {
let source_file = db.hir_parse(file_id);
Arc::new(SourceFileItems::from_source_file(&source_file, file_id))
}
pub(crate) fn file_item_query(
db: &impl DefDatabase,
source_item_id: SourceItemId,
) -> TreeArc<SyntaxNode> {
let source_file = db.hir_parse(source_item_id.file_id);
db.file_items(source_item_id.file_id)[source_item_id.item_id]
.to_node(&source_file)
.to_owned()
}
pub(crate) fn from_source_file(
source_file: &SourceFile,
file_id: HirFileId,
) -> SourceFileItems {
let mut res = SourceFileItems { file_id, arena: Arena::default() };
// By walking the tree in breadth-first order we make sure that parents
// get lower ids than children. That is, adding a new child does not
// change the parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
bfs(source_file.syntax(), |it| {
if let Some(module_item) = ast::ModuleItem::cast(it) {
res.alloc(module_item.syntax());
} else if let Some(macro_call) = ast::MacroCall::cast(it) {
res.alloc(macro_call.syntax());
}
});
res
}
fn alloc(&mut self, item: &SyntaxNode) -> SourceFileItemId {
self.arena.alloc(SyntaxNodePtr::new(item))
}
pub(crate) fn id_of(&self, file_id: HirFileId, item: &SyntaxNode) -> SourceFileItemId {
assert_eq!(
self.file_id, file_id,
"SourceFileItems: wrong file, expected {:?}, got {:?}",
self.file_id, file_id
);
self.id_of_unchecked(item)
}
pub(crate) fn id_of_unchecked(&self, item: &SyntaxNode) -> SourceFileItemId {
let ptr = SyntaxNodePtr::new(item);
if let Some((id, _)) = self.arena.iter().find(|(_id, i)| **i == ptr) {
return id;
}
panic!(
"Can't find {:?} in SourceFileItems:\n{:?}",
item,
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
);
}
}
impl std::ops::Index<SourceFileItemId> for SourceFileItems {
type Output = SyntaxNodePtr;
fn index(&self, idx: SourceFileItemId) -> &SyntaxNodePtr {
&self.arena[idx]
}
}
/// Walks the subtree in bfs order, calling `f` for each node.
fn bfs(node: &SyntaxNode, mut f: impl FnMut(&SyntaxNode)) {
let mut curr_layer = vec![node];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
curr_layer.drain(..).for_each(|node| {
next_layer.extend(node.children());
f(node);
});
std::mem::swap(&mut curr_layer, &mut next_layer);
}
}

View file

@ -23,10 +23,12 @@ pub mod mock;
mod path;
pub mod source_binder;
mod source_id;
mod ids;
mod name;
mod nameres;
mod adt;
mod traits;
mod type_alias;
mod type_ref;
mod ty;
@ -35,6 +37,7 @@ mod expr;
mod generics;
mod docs;
mod resolve;
pub mod diagnostics;
mod code_model_api;
mod code_model_impl;
@ -45,13 +48,14 @@ mod marks;
use crate::{
db::{HirDatabase, DefDatabase},
name::{AsName, KnownName},
ids::{SourceItemId, SourceFileItems},
source_id::{FileAstId, AstId},
};
pub use self::{
path::{Path, PathKind},
name::Name,
ids::{HirFileId, MacroCallId, MacroCallLoc, HirInterner},
source_id::{AstIdMap, ErasedFileAstId},
ids::{HirFileId, MacroDefId, MacroCallId, MacroCallLoc, HirInterner},
nameres::{PerNs, Namespace},
ty::{Ty, ApplicationTy, TypeCtor, Substs, display::HirDisplay},
impl_block::{ImplBlock, ImplItem},
@ -63,7 +67,7 @@ pub use self::{
pub use self::code_model_api::{
Crate, CrateDependency,
Module, ModuleDef, ModuleSource, Problem,
Module, ModuleDef, ModuleSource,
Struct, Enum, EnumVariant,
Function, FnSignature,
StructField, FieldSource,

View file

@ -9,7 +9,7 @@ use relative_path::RelativePathBuf;
use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset};
use rustc_hash::FxHashMap;
use crate::{db, HirInterner};
use crate::{db, HirInterner, diagnostics::DiagnosticSink};
pub const WORKSPACE: SourceRootId = SourceRootId(0);
@ -70,6 +70,22 @@ impl MockDatabase {
self.set_crate_graph(Arc::new(crate_graph))
}
pub fn diagnostics(&self) -> String {
let mut buf = String::from("\n");
let mut files: Vec<FileId> = self.files.values().map(|&it| it).collect();
files.sort();
for file in files {
let module = crate::source_binder::module_from_file_id(self, file).unwrap();
module.diagnostics(
self,
&mut DiagnosticSink::new(|d| {
buf += &format!("{:?}: {}\n", d.syntax_node(self).text(), d.message());
}),
)
}
buf
}
fn from_fixture(fixture: &str) -> (MockDatabase, Option<FilePosition>) {
let mut db = MockDatabase::default();

View file

@ -59,12 +59,16 @@ use rustc_hash::FxHashMap;
use ra_arena::{Arena, RawId, impl_arena_id};
use ra_db::{FileId, Edition};
use test_utils::tested_by;
use ra_syntax::ast;
use ra_prof::profile;
use crate::{
ModuleDef, Name, Crate, Module, Problem,
DefDatabase, Path, PathKind, HirFileId,
ids::{SourceItemId, SourceFileItemId, MacroCallId},
ModuleDef, Name, Crate, Module,
DefDatabase, Path, PathKind, HirFileId, Trait,
ids::MacroDefId,
diagnostics::DiagnosticSink,
nameres::diagnostics::DefDiagnostic,
AstId,
};
pub(crate) use self::raw::{RawItems, ImportId, ImportSourceMap};
@ -83,10 +87,8 @@ pub struct CrateDefMap {
extern_prelude: FxHashMap<Name, ModuleDef>,
root: CrateModuleId,
modules: Arena<CrateModuleId, ModuleData>,
macros: Arena<CrateMacroId, mbe::MacroRules>,
public_macros: FxHashMap<Name, CrateMacroId>,
macro_resolutions: FxHashMap<MacroCallId, (Crate, CrateMacroId)>,
problems: CrateDefMapProblems,
public_macros: FxHashMap<Name, MacroDefId>,
diagnostics: Vec<DefDiagnostic>,
}
impl std::ops::Index<CrateModuleId> for CrateDefMap {
@ -96,18 +98,6 @@ impl std::ops::Index<CrateModuleId> for CrateDefMap {
}
}
impl std::ops::Index<CrateMacroId> for CrateDefMap {
type Output = mbe::MacroRules;
fn index(&self, id: CrateMacroId) -> &mbe::MacroRules {
&self.macros[id]
}
}
/// An ID of a macro, **local** to a specific crate
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(crate) struct CrateMacroId(RawId);
impl_arena_id!(CrateMacroId);
/// An ID of a module, **local** to a specific crate
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(crate) struct CrateModuleId(RawId);
@ -119,28 +109,13 @@ pub(crate) struct ModuleData {
pub(crate) children: FxHashMap<Name, CrateModuleId>,
pub(crate) scope: ModuleScope,
/// None for root
pub(crate) declaration: Option<SourceItemId>,
pub(crate) declaration: Option<AstId<ast::Module>>,
/// None for inline modules.
///
/// Note that non-inline modules, by definition, live inside a non-macro file.
pub(crate) definition: Option<FileId>,
}
#[derive(Default, Debug, PartialEq, Eq)]
pub(crate) struct CrateDefMapProblems {
problems: Vec<(SourceItemId, Problem)>,
}
impl CrateDefMapProblems {
fn add(&mut self, source_item_id: SourceItemId, problem: Problem) {
self.problems.push((source_item_id, problem))
}
pub(crate) fn iter<'a>(&'a self) -> impl Iterator<Item = (&'a SourceItemId, &'a Problem)> + 'a {
self.problems.iter().map(|(s, p)| (s, p))
}
}
#[derive(Debug, Default, PartialEq, Eq, Clone)]
pub struct ModuleScope {
items: FxHashMap<Name, Resolution>,
@ -153,6 +128,12 @@ impl ModuleScope {
pub fn get(&self, name: &Name) -> Option<&Resolution> {
self.items.get(name)
}
pub fn traits<'a>(&'a self) -> impl Iterator<Item = Trait> + 'a {
self.items.values().filter_map(|r| match r.def.take_types() {
Some(ModuleDef::Trait(t)) => Some(t),
_ => None,
})
}
}
#[derive(Debug, Clone, PartialEq, Eq, Default)]
@ -210,10 +191,8 @@ impl CrateDefMap {
prelude: None,
root,
modules,
macros: Arena::default(),
public_macros: FxHashMap::default(),
macro_resolutions: FxHashMap::default(),
problems: CrateDefMapProblems::default(),
diagnostics: Vec::new(),
}
};
let def_map = collector::collect_defs(db, def_map);
@ -224,10 +203,6 @@ impl CrateDefMap {
self.root
}
pub(crate) fn problems(&self) -> &CrateDefMapProblems {
&self.problems
}
pub(crate) fn mk_module(&self, module_id: CrateModuleId) -> Module {
Module { krate: self.krate, module_id }
}
@ -240,19 +215,20 @@ impl CrateDefMap {
&self.extern_prelude
}
pub(crate) fn resolve_macro(
pub(crate) fn add_diagnostics(
&self,
macro_call_id: MacroCallId,
) -> Option<(Crate, CrateMacroId)> {
self.macro_resolutions.get(&macro_call_id).map(|&it| it)
db: &impl DefDatabase,
module: CrateModuleId,
sink: &mut DiagnosticSink,
) {
self.diagnostics.iter().for_each(|it| it.add_to(db, module, sink))
}
pub(crate) fn find_module_by_source(
&self,
file_id: HirFileId,
decl_id: Option<SourceFileItemId>,
decl_id: Option<AstId<ast::Module>>,
) -> Option<CrateModuleId> {
let decl_id = decl_id.map(|it| it.with_file_id(file_id));
let (module_id, _module_data) = self.modules.iter().find(|(_module_id, module_data)| {
if decl_id.is_some() {
module_data.declaration == decl_id
@ -452,3 +428,46 @@ impl CrateDefMap {
}
}
}
mod diagnostics {
use relative_path::RelativePathBuf;
use ra_syntax::{AstPtr, ast};
use crate::{
AstId, DefDatabase,
nameres::CrateModuleId,
diagnostics::{DiagnosticSink, UnresolvedModule},
};
#[derive(Debug, PartialEq, Eq)]
pub(super) enum DefDiagnostic {
UnresolvedModule {
module: CrateModuleId,
declaration: AstId<ast::Module>,
candidate: RelativePathBuf,
},
}
impl DefDiagnostic {
pub(super) fn add_to(
&self,
db: &impl DefDatabase,
target_module: CrateModuleId,
sink: &mut DiagnosticSink,
) {
match self {
DefDiagnostic::UnresolvedModule { module, declaration, candidate } => {
if *module != target_module {
return;
}
let decl = declaration.to_node(db);
sink.push(UnresolvedModule {
file: declaration.file_id(),
decl: AstPtr::new(&decl),
candidate: candidate.clone(),
})
}
}
}
}
}

View file

@ -3,17 +3,22 @@ use rustc_hash::FxHashMap;
use relative_path::RelativePathBuf;
use test_utils::tested_by;
use ra_db::FileId;
use ra_syntax::ast;
use crate::{
Function, Module, Struct, Enum, Const, Static, Trait, TypeAlias,
DefDatabase, HirFileId, Name, Path, Problem, Crate,
DefDatabase, HirFileId, Name, Path,
KnownName,
nameres::{Resolution, PerNs, ModuleDef, ReachedFixedPoint, ResolveMode, raw},
ids::{AstItemDef, LocationCtx, MacroCallLoc, SourceItemId, MacroCallId},
nameres::{
Resolution, PerNs, ModuleDef, ReachedFixedPoint, ResolveMode,
CrateDefMap, CrateModuleId, ModuleData,
diagnostics::DefDiagnostic,
raw,
},
ids::{AstItemDef, LocationCtx, MacroCallLoc, MacroCallId, MacroDefId},
AstId,
};
use super::{CrateDefMap, CrateModuleId, ModuleData, CrateMacroId};
pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
// populate external prelude
for dep in def_map.krate.dependencies(db) {
@ -48,8 +53,8 @@ struct DefCollector<DB> {
def_map: CrateDefMap,
glob_imports: FxHashMap<CrateModuleId, Vec<(CrateModuleId, raw::ImportId)>>,
unresolved_imports: Vec<(CrateModuleId, raw::ImportId, raw::ImportData)>,
unexpanded_macros: Vec<(CrateModuleId, MacroCallId, Path, tt::Subtree)>,
global_macro_scope: FxHashMap<Name, CrateMacroId>,
unexpanded_macros: Vec<(CrateModuleId, AstId<ast::MacroCall>, Path)>,
global_macro_scope: FxHashMap<Name, MacroDefId>,
}
impl<'a, DB> DefCollector<&'a DB>
@ -59,7 +64,7 @@ where
fn collect(&mut self) {
let crate_graph = self.db.crate_graph();
let file_id = crate_graph.crate_root(self.def_map.krate.crate_id());
let raw_items = self.db.raw_items(file_id);
let raw_items = self.db.raw_items(file_id.into());
let module_id = self.def_map.root;
self.def_map.modules[module_id].definition = Some(file_id);
ModCollector {
@ -90,15 +95,12 @@ where
}
}
fn define_macro(&mut self, name: Name, tt: &tt::Subtree, export: bool) {
if let Ok(rules) = mbe::MacroRules::parse(tt) {
let macro_id = self.def_map.macros.alloc(rules);
fn define_macro(&mut self, name: Name, macro_id: MacroDefId, export: bool) {
if export {
self.def_map.public_macros.insert(name.clone(), macro_id);
}
self.global_macro_scope.insert(name, macro_id);
}
}
fn resolve_imports(&mut self) -> ReachedFixedPoint {
let mut imports = std::mem::replace(&mut self.unresolved_imports, Vec::new());
@ -293,7 +295,7 @@ where
let mut macros = std::mem::replace(&mut self.unexpanded_macros, Vec::new());
let mut resolved = Vec::new();
let mut res = ReachedFixedPoint::Yes;
macros.retain(|(module_id, call_id, path, tt)| {
macros.retain(|(module_id, ast_id, path)| {
if path.segments.len() != 2 {
return true;
}
@ -309,48 +311,24 @@ where
res = ReachedFixedPoint::No;
let def_map = self.db.crate_def_map(krate);
if let Some(macro_id) = def_map.public_macros.get(&path.segments[1].name).cloned() {
resolved.push((*module_id, *call_id, (krate, macro_id), tt.clone()));
let call_id = MacroCallLoc { def: macro_id, ast_id: *ast_id }.id(self.db);
resolved.push((*module_id, call_id));
}
false
});
for (module_id, macro_call_id, macro_def_id, arg) in resolved {
self.collect_macro_expansion(module_id, macro_call_id, macro_def_id, arg);
for (module_id, macro_call_id) in resolved {
self.collect_macro_expansion(module_id, macro_call_id);
}
res
}
fn collect_macro_expansion(
&mut self,
module_id: CrateModuleId,
macro_call_id: MacroCallId,
macro_def_id: (Crate, CrateMacroId),
macro_arg: tt::Subtree,
) {
let (macro_krate, macro_id) = macro_def_id;
let dm;
let rules = if macro_krate == self.def_map.krate {
&self.def_map[macro_id]
} else {
dm = self.db.crate_def_map(macro_krate);
&dm[macro_id]
};
if let Ok(expansion) = rules.expand(&macro_arg) {
self.def_map.macro_resolutions.insert(macro_call_id, macro_def_id);
// XXX: this **does not** go through a database, because we can't
// identify macro_call without adding the whole state of name resolution
// as a parameter to the query.
//
// So, we run the queries "manually" and we must ensure that
// `db.hir_parse(macro_call_id)` returns the same source_file.
fn collect_macro_expansion(&mut self, module_id: CrateModuleId, macro_call_id: MacroCallId) {
let file_id: HirFileId = macro_call_id.into();
let source_file = mbe::token_tree_to_ast_item_list(&expansion);
let raw_items = raw::RawItems::from_source_file(&source_file, file_id);
let raw_items = self.db.raw_items(file_id);
ModCollector { def_collector: &mut *self, file_id, module_id, raw_items: &raw_items }
.collect(raw_items.items())
}
}
fn finish(self) -> CrateDefMap {
self.def_map
@ -387,12 +365,9 @@ where
fn collect_module(&mut self, module: &raw::ModuleData) {
match module {
// inline module, just recurse
raw::ModuleData::Definition { name, items, source_item_id } => {
let module_id = self.push_child_module(
name.clone(),
source_item_id.with_file_id(self.file_id),
None,
);
raw::ModuleData::Definition { name, items, ast_id } => {
let module_id =
self.push_child_module(name.clone(), ast_id.with_file_id(self.file_id), None);
ModCollector {
def_collector: &mut *self.def_collector,
module_id,
@ -402,20 +377,13 @@ where
.collect(&*items);
}
// out-of-line module: resolve, parse and recurse
raw::ModuleData::Declaration { name, source_item_id } => {
let source_item_id = source_item_id.with_file_id(self.file_id);
raw::ModuleData::Declaration { name, ast_id } => {
let ast_id = ast_id.with_file_id(self.file_id);
let is_root = self.def_collector.def_map.modules[self.module_id].parent.is_none();
let (file_ids, problem) =
resolve_submodule(self.def_collector.db, self.file_id, name, is_root);
if let Some(problem) = problem {
self.def_collector.def_map.problems.add(source_item_id, problem)
}
if let Some(&file_id) = file_ids.first() {
let module_id =
self.push_child_module(name.clone(), source_item_id, Some(file_id));
let raw_items = self.def_collector.db.raw_items(file_id);
match resolve_submodule(self.def_collector.db, self.file_id, name, is_root) {
Ok(file_id) => {
let module_id = self.push_child_module(name.clone(), ast_id, Some(file_id));
let raw_items = self.def_collector.db.raw_items(file_id.into());
ModCollector {
def_collector: &mut *self.def_collector,
module_id,
@ -424,6 +392,14 @@ where
}
.collect(raw_items.items())
}
Err(candidate) => self.def_collector.def_map.diagnostics.push(
DefDiagnostic::UnresolvedModule {
module: self.module_id,
declaration: ast_id,
candidate,
},
),
};
}
}
}
@ -431,7 +407,7 @@ where
fn push_child_module(
&mut self,
name: Name,
declaration: SourceItemId,
declaration: AstId<ast::Module>,
definition: Option<FileId>,
) -> CrateModuleId {
let modules = &mut self.def_collector.def_map.modules;
@ -453,23 +429,24 @@ where
fn define_def(&mut self, def: &raw::DefData) {
let module = Module { krate: self.def_collector.def_map.krate, module_id: self.module_id };
let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id.into());
macro_rules! id {
() => {
AstItemDef::from_source_item_id_unchecked(ctx, def.source_item_id)
macro_rules! def {
($kind:ident, $ast_id:ident) => {
$kind { id: AstItemDef::from_ast_id(ctx, $ast_id) }.into()
};
}
let name = def.name.clone();
let def: PerNs<ModuleDef> = match def.kind {
raw::DefKind::Function => PerNs::values(Function { id: id!() }.into()),
raw::DefKind::Struct => {
let s = Struct { id: id!() }.into();
raw::DefKind::Function(ast_id) => PerNs::values(def!(Function, ast_id)),
raw::DefKind::Struct(ast_id) => {
let s = def!(Struct, ast_id);
PerNs::both(s, s)
}
raw::DefKind::Enum => PerNs::types(Enum { id: id!() }.into()),
raw::DefKind::Const => PerNs::values(Const { id: id!() }.into()),
raw::DefKind::Static => PerNs::values(Static { id: id!() }.into()),
raw::DefKind::Trait => PerNs::types(Trait { id: id!() }.into()),
raw::DefKind::TypeAlias => PerNs::types(TypeAlias { id: id!() }.into()),
raw::DefKind::Enum(ast_id) => PerNs::types(def!(Enum, ast_id)),
raw::DefKind::Const(ast_id) => PerNs::values(def!(Const, ast_id)),
raw::DefKind::Static(ast_id) => PerNs::values(def!(Static, ast_id)),
raw::DefKind::Trait(ast_id) => PerNs::types(def!(Trait, ast_id)),
raw::DefKind::TypeAlias(ast_id) => PerNs::types(def!(TypeAlias, ast_id)),
};
let resolution = Resolution { def, import: None };
self.def_collector.update(self.module_id, None, &[(name, resolution)])
@ -479,39 +456,27 @@ where
// Case 1: macro rules, define a macro in crate-global mutable scope
if is_macro_rules(&mac.path) {
if let Some(name) = &mac.name {
self.def_collector.define_macro(name.clone(), &mac.arg, mac.export)
let macro_id = MacroDefId(mac.ast_id.with_file_id(self.file_id));
self.def_collector.define_macro(name.clone(), macro_id, mac.export)
}
return;
}
let source_item_id = SourceItemId { file_id: self.file_id, item_id: mac.source_item_id };
let macro_call_id = MacroCallLoc {
module: Module { krate: self.def_collector.def_map.krate, module_id: self.module_id },
source_item_id,
}
.id(self.def_collector.db);
let ast_id = mac.ast_id.with_file_id(self.file_id);
// Case 2: try to expand macro_rules from this crate, triggering
// recursive item collection.
if let Some(&macro_id) =
mac.path.as_ident().and_then(|name| self.def_collector.global_macro_scope.get(name))
{
self.def_collector.collect_macro_expansion(
self.module_id,
macro_call_id,
(self.def_collector.def_map.krate, macro_id),
mac.arg.clone(),
);
let macro_call_id = MacroCallLoc { def: macro_id, ast_id }.id(self.def_collector.db);
self.def_collector.collect_macro_expansion(self.module_id, macro_call_id);
return;
}
// Case 3: path to a macro from another crate, expand during name resolution
self.def_collector.unexpanded_macros.push((
self.module_id,
macro_call_id,
mac.path.clone(),
mac.arg.clone(),
))
self.def_collector.unexpanded_macros.push((self.module_id, ast_id, mac.path.clone()))
}
}
@ -524,7 +489,7 @@ fn resolve_submodule(
file_id: HirFileId,
name: &Name,
is_root: bool,
) -> (Vec<FileId>, Option<Problem>) {
) -> Result<FileId, RelativePathBuf> {
// FIXME: handle submodules of inline modules properly
let file_id = file_id.original_file(db);
let source_root_id = db.file_source_root(file_id);
@ -545,17 +510,10 @@ fn resolve_submodule(
candidates.push(file_dir_mod.clone());
};
let sr = db.source_root(source_root_id);
let points_to = candidates
.into_iter()
.filter_map(|path| sr.files.get(&path))
.map(|&it| it)
.collect::<Vec<_>>();
let problem = if points_to.is_empty() {
Some(Problem::UnresolvedModule {
candidate: if is_dir_owner { file_mod } else { file_dir_mod },
})
} else {
None
};
(points_to, problem)
let mut points_to = candidates.into_iter().filter_map(|path| sr.files.get(&path)).map(|&it| it);
// FIXME: handle ambiguity
match points_to.next() {
Some(file_id) => Ok(file_id),
None => Err(if is_dir_owner { file_mod } else { file_dir_mod }),
}
}
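resolve_submodule now reports failure as an `Err` carrying the path that the diagnostic later suggests creating. A rough, self-contained sketch of the candidate-file idea (the paths and helpers below are illustrative only; the real code also distinguishes directory-owning modules):

fn submodule_candidates(decl_dir: &str, name: &str) -> Vec<String> {
    // `mod foo;` declared in `decl_dir` may live at `decl_dir/foo.rs` or `decl_dir/foo/mod.rs`.
    vec![format!("{}/{}.rs", decl_dir, name), format!("{}/{}/mod.rs", decl_dir, name)]
}

fn resolve(decl_dir: &str, name: &str, existing: &[&str]) -> Result<String, String> {
    let candidates = submodule_candidates(decl_dir, name);
    if let Some(found) = candidates.iter().find(|c| existing.contains(&c.as_str())) {
        return Ok(found.clone());
    }
    // On failure, hand back the preferred candidate as the "create this file" suggestion.
    Err(candidates.into_iter().next().unwrap())
}

fn main() {
    let existing = ["src/lib.rs", "src/foo.rs"];
    assert_eq!(resolve("src", "foo", &existing), Ok("src/foo.rs".to_string()));
    assert_eq!(resolve("src", "bar", &existing), Err("src/bar.rs".to_string()));
}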

View file

@ -4,7 +4,6 @@ use std::{
};
use test_utils::tested_by;
use ra_db::FileId;
use ra_arena::{Arena, impl_arena_id, RawId, map::ArenaMap};
use ra_syntax::{
AstNode, SourceFile, AstPtr, TreeArc,
@ -13,9 +12,13 @@ use ra_syntax::{
use crate::{
DefDatabase, Name, AsName, Path, HirFileId, ModuleSource,
ids::{SourceFileItemId, SourceFileItems},
AstIdMap, FileAstId,
};
/// `RawItems` is a set of top-level items in a file (except for impls).
///
/// It is the input to the name resolution algorithm. `RawItems` is not invalidated
/// by most edits.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct RawItems {
modules: Arena<Module, ModuleData>,
@ -32,11 +35,11 @@ pub struct ImportSourceMap {
}
impl ImportSourceMap {
pub(crate) fn insert(&mut self, import: ImportId, segment: &ast::PathSegment) {
fn insert(&mut self, import: ImportId, segment: &ast::PathSegment) {
self.map.insert(import, AstPtr::new(segment))
}
pub fn get(&self, source: &ModuleSource, import: ImportId) -> TreeArc<ast::PathSegment> {
pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> TreeArc<ast::PathSegment> {
let file = match source {
ModuleSource::SourceFile(file) => &*file,
ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(),
@ -47,40 +50,27 @@ impl ImportSourceMap {
}
impl RawItems {
pub(crate) fn raw_items_query(db: &impl DefDatabase, file_id: FileId) -> Arc<RawItems> {
pub(crate) fn raw_items_query(db: &impl DefDatabase, file_id: HirFileId) -> Arc<RawItems> {
db.raw_items_with_source_map(file_id).0
}
pub(crate) fn raw_items_with_source_map_query(
db: &impl DefDatabase,
file_id: FileId,
file_id: HirFileId,
) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
let mut collector = RawItemsCollector {
raw_items: RawItems::default(),
source_file_items: db.file_items(file_id.into()),
source_ast_id_map: db.ast_id_map(file_id.into()),
source_map: ImportSourceMap::default(),
};
let source_file = db.parse(file_id);
let source_file = db.hir_parse(file_id);
collector.process_module(None, &*source_file);
(Arc::new(collector.raw_items), Arc::new(collector.source_map))
}
pub(crate) fn items(&self) -> &[RawItem] {
pub(super) fn items(&self) -> &[RawItem] {
&self.items
}
// We can't use queries during name resolution for fear of cycles, so this
// is a query-less variant of the above function.
pub(crate) fn from_source_file(source_file: &SourceFile, file_id: HirFileId) -> RawItems {
let source_file_items = SourceFileItems::from_source_file(source_file, file_id);
let mut collector = RawItemsCollector {
raw_items: RawItems::default(),
source_file_items: Arc::new(source_file_items),
source_map: ImportSourceMap::default(),
};
collector.process_module(None, &*source_file);
collector.raw_items
}
}
impl Index<Module> for RawItems {
@ -112,7 +102,7 @@ impl Index<Macro> for RawItems {
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(crate) enum RawItem {
pub(super) enum RawItem {
Module(Module),
Import(ImportId),
Def(Def),
@ -120,13 +110,13 @@ pub(crate) enum RawItem {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct Module(RawId);
pub(super) struct Module(RawId);
impl_arena_id!(Module);
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum ModuleData {
Declaration { name: Name, source_item_id: SourceFileItemId },
Definition { name: Name, source_item_id: SourceFileItemId, items: Vec<RawItem> },
pub(super) enum ModuleData {
Declaration { name: Name, ast_id: FileAstId<ast::Module> },
Definition { name: Name, ast_id: FileAstId<ast::Module>, items: Vec<RawItem> },
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -135,51 +125,49 @@ impl_arena_id!(ImportId);
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImportData {
pub(crate) path: Path,
pub(crate) alias: Option<Name>,
pub(crate) is_glob: bool,
pub(crate) is_prelude: bool,
pub(crate) is_extern_crate: bool,
pub(super) path: Path,
pub(super) alias: Option<Name>,
pub(super) is_glob: bool,
pub(super) is_prelude: bool,
pub(super) is_extern_crate: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct Def(RawId);
pub(super) struct Def(RawId);
impl_arena_id!(Def);
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct DefData {
pub(crate) source_item_id: SourceFileItemId,
pub(crate) name: Name,
pub(crate) kind: DefKind,
pub(super) struct DefData {
pub(super) name: Name,
pub(super) kind: DefKind,
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(crate) enum DefKind {
Function,
Struct,
Enum,
Const,
Static,
Trait,
TypeAlias,
pub(super) enum DefKind {
Function(FileAstId<ast::FnDef>),
Struct(FileAstId<ast::StructDef>),
Enum(FileAstId<ast::EnumDef>),
Const(FileAstId<ast::ConstDef>),
Static(FileAstId<ast::StaticDef>),
Trait(FileAstId<ast::TraitDef>),
TypeAlias(FileAstId<ast::TypeAliasDef>),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct Macro(RawId);
pub(super) struct Macro(RawId);
impl_arena_id!(Macro);
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct MacroData {
pub(crate) source_item_id: SourceFileItemId,
pub(crate) path: Path,
pub(crate) name: Option<Name>,
pub(crate) arg: tt::Subtree,
pub(crate) export: bool,
pub(super) struct MacroData {
pub(super) ast_id: FileAstId<ast::MacroCall>,
pub(super) path: Path,
pub(super) name: Option<Name>,
pub(super) export: bool,
}
struct RawItemsCollector {
raw_items: RawItems,
source_file_items: Arc<SourceFileItems>,
source_ast_id_map: Arc<AstIdMap>,
source_map: ImportSourceMap,
}
@ -211,18 +199,31 @@ impl RawItemsCollector {
// impls don't participate in name resolution
return;
}
ast::ModuleItemKind::StructDef(it) => (DefKind::Struct, it.name()),
ast::ModuleItemKind::EnumDef(it) => (DefKind::Enum, it.name()),
ast::ModuleItemKind::FnDef(it) => (DefKind::Function, it.name()),
ast::ModuleItemKind::TraitDef(it) => (DefKind::Trait, it.name()),
ast::ModuleItemKind::TypeAliasDef(it) => (DefKind::TypeAlias, it.name()),
ast::ModuleItemKind::ConstDef(it) => (DefKind::Const, it.name()),
ast::ModuleItemKind::StaticDef(it) => (DefKind::Static, it.name()),
ast::ModuleItemKind::StructDef(it) => {
(DefKind::Struct(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::EnumDef(it) => {
(DefKind::Enum(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::FnDef(it) => {
(DefKind::Function(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::TraitDef(it) => {
(DefKind::Trait(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::TypeAliasDef(it) => {
(DefKind::TypeAlias(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::ConstDef(it) => {
(DefKind::Const(self.source_ast_id_map.ast_id(it)), it.name())
}
ast::ModuleItemKind::StaticDef(it) => {
(DefKind::Static(self.source_ast_id_map.ast_id(it)), it.name())
}
};
if let Some(name) = name {
let name = name.as_name();
let source_item_id = self.source_file_items.id_of_unchecked(item.syntax());
let def = self.raw_items.defs.alloc(DefData { name, kind, source_item_id });
let def = self.raw_items.defs.alloc(DefData { name, kind });
self.push_item(current_module, RawItem::Def(def))
}
}
@ -232,10 +233,9 @@ impl RawItemsCollector {
Some(it) => it.as_name(),
None => return,
};
let source_item_id = self.source_file_items.id_of_unchecked(module.syntax());
let ast_id = self.source_ast_id_map.ast_id(module);
if module.has_semi() {
let item =
self.raw_items.modules.alloc(ModuleData::Declaration { name, source_item_id });
let item = self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id });
self.push_item(current_module, RawItem::Module(item));
return;
}
@ -243,7 +243,7 @@ impl RawItemsCollector {
if let Some(item_list) = module.item_list() {
let item = self.raw_items.modules.alloc(ModuleData::Definition {
name,
source_item_id,
ast_id,
items: Vec::new(),
});
self.process_module(Some(item), item_list);
@ -291,18 +291,15 @@ impl RawItemsCollector {
}
fn add_macro(&mut self, current_module: Option<Module>, m: &ast::MacroCall) {
let (path, arg) = match (
m.path().and_then(Path::from_ast),
m.token_tree().and_then(mbe::ast_to_token_tree),
) {
(Some(path), Some((token_tree, _token_map))) => (path, token_tree),
let path = match m.path().and_then(Path::from_ast) {
Some(it) => it,
_ => return,
};
let name = m.name().map(|it| it.as_name());
let source_item_id = self.source_file_items.id_of_unchecked(m.syntax());
let ast_id = self.source_ast_id_map.ast_id(m);
let export = m.has_atom_attr("macro_export");
let m = self.raw_items.macros.alloc(MacroData { source_item_id, path, arg, name, export });
let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export });
self.push_item(current_module, RawItem::Macro(m));
}

View file

@ -552,3 +552,22 @@ foo: v
"###
);
}
#[test]
fn unresolved_module_diagnostics() {
let diagnostics = MockDatabase::with_files(
r"
//- /lib.rs
mod foo;
mod bar;
mod baz {}
//- /foo.rs
",
)
.diagnostics();
assert_snapshot_matches!(diagnostics, @r###"
"mod bar;": unresolved module
"###
);
}

View file

@ -90,34 +90,44 @@ fn adding_inner_items_should_not_invalidate_def_map() {
);
}
// It would be awesome to make this work, but it's unclear how
#[test]
#[ignore]
fn typing_inside_a_function_inside_a_macro_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
fn typing_inside_a_macro_should_not_invalidate_def_map() {
let (mut db, pos) = MockDatabase::with_position(
"
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn f() {
$ident + $ident;
};
}
}
mod foo;
use crate::foo::bar::Baz;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
<|>
salsa::query_group! {
trait Baz {
fn foo() -> i32 { 1 + 1 }
}
}
",
"
salsa::query_group! {
trait Baz {
fn foo() -> i32 { 92 }
}
}
m!(X);
",
);
{
let events = db.log_executed(|| {
let module = crate::source_binder::module_from_file_id(&db, pos.file_id).unwrap();
let decls = module.declarations(&db);
assert_eq!(decls.len(), 1);
});
assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
}
db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string()));
{
let events = db.log_executed(|| {
let module = crate::source_binder::module_from_file_id(&db, pos.file_id).unwrap();
let decls = module.declarations(&db);
assert_eq!(decls.len(), 1);
});
assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
}
}

View file

@ -11,7 +11,7 @@ use crate::{
generics::GenericParams,
expr::{scope::{ExprScopes, ScopeId}, PatId, Body},
impl_block::ImplBlock,
path::Path,
path::Path, Trait
};
#[derive(Debug, Clone, Default)]
@ -175,6 +175,21 @@ impl Resolver {
names
}
pub(crate) fn traits_in_scope<'a>(&'a self) -> impl Iterator<Item = Trait> + 'a {
// FIXME prelude
self.scopes
.iter()
.rev()
.flat_map(|scope| {
match scope {
Scope::ModuleScope(m) => Some(m.crate_def_map[m.module_id].scope.traits()),
_ => None,
}
.into_iter()
})
.flatten()
}
fn module(&self) -> Option<(&CrateDefMap, CrateModuleId)> {
self.scopes.iter().rev().find_map(|scope| match scope {
Scope::ModuleScope(m) => Some((&*m.crate_def_map, m.module_id)),

View file

@ -15,8 +15,8 @@ use ra_syntax::{
use crate::{
HirDatabase, Function, Struct, Enum,
AsName, Module, HirFileId, Crate, Trait, Resolver,
ids::{LocationCtx, SourceFileItemId},
expr
ids::LocationCtx,
expr, AstId
};
/// Locates the module by `FileId`. Picks topmost module in the file.
@ -54,8 +54,8 @@ fn module_from_inline(
) -> Option<Module> {
assert!(!module.has_semi());
let file_id = file_id.into();
let file_items = db.file_items(file_id);
let item_id = file_items.id_of(file_id, module.syntax());
let ast_id_map = db.ast_id_map(file_id);
let item_id = ast_id_map.ast_id(module).with_file_id(file_id);
module_from_source(db, file_id, Some(item_id))
}
@ -75,7 +75,7 @@ pub fn module_from_child_node(
fn module_from_source(
db: &impl HirDatabase,
file_id: HirFileId,
decl_id: Option<SourceFileItemId>,
decl_id: Option<AstId<ast::Module>>,
) -> Option<Module> {
let source_root_id = db.file_source_root(file_id.as_original_file());
db.source_root_crates(source_root_id).iter().map(|&crate_id| Crate { crate_id }).find_map(

View file

@ -0,0 +1,150 @@
use std::{marker::PhantomData, sync::Arc, hash::{Hash, Hasher}};
use ra_arena::{Arena, RawId, impl_arena_id};
use ra_syntax::{SyntaxNodePtr, TreeArc, SyntaxNode, SourceFile, AstNode, ast};
use crate::{HirFileId, DefDatabase};
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.
#[derive(Debug)]
pub(crate) struct AstId<N: AstNode> {
file_id: HirFileId,
file_ast_id: FileAstId<N>,
}
impl<N: AstNode> Clone for AstId<N> {
fn clone(&self) -> AstId<N> {
*self
}
}
impl<N: AstNode> Copy for AstId<N> {}
impl<N: AstNode> PartialEq for AstId<N> {
fn eq(&self, other: &Self) -> bool {
(self.file_id, self.file_ast_id) == (other.file_id, other.file_ast_id)
}
}
impl<N: AstNode> Eq for AstId<N> {}
impl<N: AstNode> Hash for AstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
(self.file_id, self.file_ast_id).hash(hasher);
}
}
impl<N: AstNode> AstId<N> {
pub(crate) fn file_id(&self) -> HirFileId {
self.file_id
}
pub(crate) fn to_node(&self, db: &impl DefDatabase) -> TreeArc<N> {
let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw);
N::cast(&syntax_node).unwrap().to_owned()
}
}
/// `FileAstId` points to an AST node in a specific file.
#[derive(Debug)]
pub(crate) struct FileAstId<N: AstNode> {
raw: ErasedFileAstId,
_ty: PhantomData<N>,
}
impl<N: AstNode> Clone for FileAstId<N> {
fn clone(&self) -> FileAstId<N> {
*self
}
}
impl<N: AstNode> Copy for FileAstId<N> {}
impl<N: AstNode> PartialEq for FileAstId<N> {
fn eq(&self, other: &Self) -> bool {
self.raw == other.raw
}
}
impl<N: AstNode> Eq for FileAstId<N> {}
impl<N: AstNode> Hash for FileAstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.raw.hash(hasher);
}
}
impl<N: AstNode> FileAstId<N> {
pub(crate) fn with_file_id(self, file_id: HirFileId) -> AstId<N> {
AstId { file_id, file_ast_id: self }
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ErasedFileAstId(RawId);
impl_arena_id!(ErasedFileAstId);
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
#[derive(Debug, PartialEq, Eq)]
pub struct AstIdMap {
arena: Arena<ErasedFileAstId, SyntaxNodePtr>,
}
impl AstIdMap {
pub(crate) fn ast_id_map_query(db: &impl DefDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
let source_file = db.hir_parse(file_id);
Arc::new(AstIdMap::from_source_file(&source_file))
}
pub(crate) fn file_item_query(
db: &impl DefDatabase,
file_id: HirFileId,
ast_id: ErasedFileAstId,
) -> TreeArc<SyntaxNode> {
let source_file = db.hir_parse(file_id);
db.ast_id_map(file_id).arena[ast_id].to_node(&source_file).to_owned()
}
pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
let ptr = SyntaxNodePtr::new(item.syntax());
let raw = match self.arena.iter().find(|(_id, i)| **i == ptr) {
Some((it, _)) => it,
None => panic!(
"Can't find {:?} in AstIdMap:\n{:?}",
item.syntax(),
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
),
};
FileAstId { raw, _ty: PhantomData }
}
fn from_source_file(source_file: &SourceFile) -> AstIdMap {
let mut res = AstIdMap { arena: Arena::default() };
// By walking the tree in breadth-first order we make sure that parents
// get lower ids than children. That is, adding a new child does not
// change the parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
bfs(source_file.syntax(), |it| {
if let Some(module_item) = ast::ModuleItem::cast(it) {
res.alloc(module_item.syntax());
} else if let Some(macro_call) = ast::MacroCall::cast(it) {
res.alloc(macro_call.syntax());
}
});
res
}
fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
self.arena.alloc(SyntaxNodePtr::new(item))
}
}
/// Walks the subtree in bfs order, calling `f` for each node.
fn bfs(node: &SyntaxNode, mut f: impl FnMut(&SyntaxNode)) {
let mut curr_layer = vec![node];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
curr_layer.drain(..).for_each(|node| {
next_layer.extend(node.children());
f(node);
});
std::mem::swap(&mut curr_layer, &mut next_layer);
}
}
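The breadth-first allocation is what gives `AstIdMap` its stability guarantee: ids are handed out level by level, so edits that only add or change items deeper in the tree never renumber the ids of the items above them. A small self-contained illustration of that property (plain Rust, not the types above):

struct Node {
    name: &'static str,
    children: Vec<Node>,
}

fn bfs_order(root: &Node) -> Vec<&'static str> {
    let mut order = Vec::new();
    let mut curr_layer = vec![root];
    let mut next_layer = Vec::new();
    while !curr_layer.is_empty() {
        for node in curr_layer.drain(..) {
            next_layer.extend(node.children.iter());
            order.push(node.name); // the position in `order` plays the role of the id
        }
        std::mem::swap(&mut curr_layer, &mut next_layer);
    }
    order
}

fn main() {
    let file = Node {
        name: "source file",
        children: vec![
            Node {
                name: "trait Foo",
                children: vec![Node { name: "fn method", children: Vec::new() }],
            },
            Node { name: "fn top_level", children: Vec::new() },
        ],
    };
    // Top-level items come first; adding another method inside `trait Foo`
    // would only append to the end of this ordering.
    assert_eq!(bfs_order(&file), ["source file", "trait Foo", "fn top_level", "fn method"]);
}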

View file

@ -0,0 +1,52 @@
//! HIR for trait definitions.
use std::sync::Arc;
use ra_syntax::ast::{self, NameOwner};
use crate::{Function, Const, TypeAlias, Name, DefDatabase, Trait, ids::LocationCtx, name::AsName};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitData {
name: Option<Name>,
items: Vec<TraitItem>,
}
impl TraitData {
pub(crate) fn trait_data_query(db: &impl DefDatabase, tr: Trait) -> Arc<TraitData> {
let (file_id, node) = tr.source(db);
let name = node.name().map(|n| n.as_name());
let module = tr.module(db);
let ctx = LocationCtx::new(db, module, file_id);
let items = if let Some(item_list) = node.item_list() {
item_list
.impl_items()
.map(|item_node| match item_node.kind() {
ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(),
ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(),
ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(),
})
.collect()
} else {
Vec::new()
};
Arc::new(TraitData { name, items })
}
pub(crate) fn name(&self) -> &Option<Name> {
&self.name
}
pub(crate) fn items(&self) -> &[TraitItem] {
&self.items
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TraitItem {
Function(Function),
Const(Const),
TypeAlias(TypeAlias),
// Existential
}
impl_froms!(TraitItem: Function, Const, TypeAlias);

View file

@ -14,7 +14,7 @@ pub(crate) mod display;
use std::sync::Arc;
use std::{fmt, mem};
use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase};
use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait};
pub(crate) use lower::{TypableDef, CallableDef, type_for_def, type_for_field, callable_item_sig};
pub(crate) use infer::{infer, InferenceResult, InferTy};
@ -91,7 +91,7 @@ pub enum TypeCtor {
/// A nominal type with (maybe 0) type parameters. This might be a primitive
/// type like `bool`, a struct, tuple, function pointer, reference or
/// several other things.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct ApplicationTy {
pub ctor: TypeCtor,
pub parameters: Substs,
@ -103,7 +103,7 @@ pub struct ApplicationTy {
/// the same thing (but in a different way).
///
/// This should be cheap to clone.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub enum Ty {
/// A nominal type with (maybe 0) type parameters. This might be a primitive
/// type like `bool`, a struct, tuple, function pointer, reference or
@ -132,7 +132,7 @@ pub enum Ty {
}
/// A list of substitutions for generic parameters.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct Substs(Arc<[Ty]>);
impl Substs {
@ -169,6 +169,21 @@ impl Substs {
}
}
/// A trait with type parameters. This includes the `Self`, so this represents a concrete type implementing the trait.
/// Name to be bikeshedded: TraitBound? TraitImplements?
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TraitRef {
/// FIXME name?
trait_: Trait,
substs: Substs,
}
impl TraitRef {
pub fn self_ty(&self) -> &Ty {
&self.substs.0[0]
}
}
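The convention encoded by `self_ty` is that the implementing type sits at index 0 of the substitutions, followed by the trait's own type parameters. A toy illustration of that layout, using stand-in types rather than the real `Ty` and `Trait`:

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Char,
    U32,
}

#[derive(Debug)]
struct TraitRefSketch {
    trait_name: &'static str,
    substs: Vec<Ty>, // substs[0] is the Self type
}

impl TraitRefSketch {
    fn self_ty(&self) -> &Ty {
        &self.substs[0]
    }
}

fn main() {
    // Roughly how `impl Into<u32> for char` would be recorded.
    let tr = TraitRefSketch { trait_name: "Into", substs: vec![Ty::Char, Ty::U32] };
    assert_eq!(tr.self_ty(), &Ty::Char);
    println!("{}<{:?}> for {:?}", tr.trait_name, &tr.substs[1], tr.self_ty());
}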
/// A function signature as seen by type inference: Several parameter types and
/// one return type.
#[derive(Clone, PartialEq, Eq, Debug)]

View file

@ -36,7 +36,9 @@ use crate::{
path::{GenericArgs, GenericArg},
adt::VariantDef,
resolve::{Resolver, Resolution},
nameres::Namespace
nameres::Namespace,
ty::infer::diagnostics::InferenceDiagnostic,
diagnostics::DiagnosticSink,
};
use super::{Ty, TypableDef, Substs, primitive, op, FnSig, ApplicationTy, TypeCtor};
@ -96,6 +98,7 @@ pub struct InferenceResult {
field_resolutions: FxHashMap<ExprId, StructField>,
/// For each associated item record what it resolves to
assoc_resolutions: FxHashMap<ExprOrPatId, ImplItem>,
diagnostics: Vec<InferenceDiagnostic>,
pub(super) type_of_expr: ArenaMap<ExprId, Ty>,
pub(super) type_of_pat: ArenaMap<PatId, Ty>,
}
@ -113,6 +116,14 @@ impl InferenceResult {
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<ImplItem> {
self.assoc_resolutions.get(&id.into()).map(|it| *it)
}
pub(crate) fn add_diagnostics(
&self,
db: &impl HirDatabase,
owner: Function,
sink: &mut DiagnosticSink,
) {
self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink))
}
}
impl Index<ExprId> for InferenceResult {
@ -143,6 +154,7 @@ struct InferenceContext<'a, D: HirDatabase> {
assoc_resolutions: FxHashMap<ExprOrPatId, ImplItem>,
type_of_expr: ArenaMap<ExprId, Ty>,
type_of_pat: ArenaMap<PatId, Ty>,
diagnostics: Vec<InferenceDiagnostic>,
/// The return type of the function being inferred.
return_ty: Ty,
}
@ -155,6 +167,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
assoc_resolutions: FxHashMap::default(),
type_of_expr: ArenaMap::default(),
type_of_pat: ArenaMap::default(),
diagnostics: Vec::default(),
var_unification_table: InPlaceUnificationTable::new(),
return_ty: Ty::Unknown, // set in collect_fn_signature
db,
@ -181,6 +194,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
assoc_resolutions: self.assoc_resolutions,
type_of_expr: expr_types,
type_of_pat: pat_types,
diagnostics: self.diagnostics,
}
}
@ -807,7 +821,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}
Expr::MethodCall { receiver, args, method_name, generic_args } => {
let receiver_ty = self.infer_expr(*receiver, &Expectation::none());
let resolved = receiver_ty.clone().lookup_method(self.db, method_name);
let resolved =
receiver_ty.clone().lookup_method(self.db, method_name, &self.resolver);
let (derefed_receiver_ty, method_ty, def_generics) = match resolved {
Some((ty, func)) => {
self.write_method_resolution(tgt_expr, func);
@ -915,9 +930,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Expr::StructLit { path, fields, spread } => {
let (ty, def_id) = self.resolve_variant(path.as_ref());
let substs = ty.substs().unwrap_or_else(Substs::empty);
for field in fields {
for (field_idx, field) in fields.into_iter().enumerate() {
let field_ty = def_id
.and_then(|it| it.field(self.db, &field.name))
.and_then(|it| match it.field(self.db, &field.name) {
Some(field) => Some(field),
None => {
self.diagnostics.push(InferenceDiagnostic::NoSuchField {
expr: tgt_expr,
field: field_idx,
});
None
}
})
.map_or(Ty::Unknown, |field| field.ty(self.db))
.subst(&substs);
self.infer_expr(field.expr, &Expectation::has_type(field_ty));
@ -1244,3 +1268,29 @@ impl Expectation {
Expectation { ty: Ty::Unknown }
}
}
mod diagnostics {
use crate::{expr::ExprId, diagnostics::{DiagnosticSink, NoSuchField}, HirDatabase, Function};
#[derive(Debug, PartialEq, Eq, Clone)]
pub(super) enum InferenceDiagnostic {
NoSuchField { expr: ExprId, field: usize },
}
impl InferenceDiagnostic {
pub(super) fn add_to(
&self,
db: &impl HirDatabase,
owner: Function,
sink: &mut DiagnosticSink,
) {
match self {
InferenceDiagnostic::NoSuchField { expr, field } => {
let (file, _) = owner.source(db);
let field = owner.body_source_map(db).field_syntax(*expr, *field);
sink.push(NoSuchField { file, field })
}
}
}
}
}

View file

@ -8,12 +8,12 @@ use rustc_hash::FxHashMap;
use crate::{
HirDatabase, Module, Crate, Name, Function, Trait,
ids::TraitId,
impl_block::{ImplId, ImplBlock, ImplItem},
ty::{Ty, TypeCtor},
nameres::CrateModuleId,
nameres::CrateModuleId, resolve::Resolver, traits::TraitItem
};
use super::{ TraitRef, Substs};
/// This is used as a key for indexing impls.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@ -38,7 +38,7 @@ pub struct CrateImplBlocks {
/// To make sense of the CrateModuleIds, we need the source root.
krate: Crate,
impls: FxHashMap<TyFingerprint, Vec<(CrateModuleId, ImplId)>>,
impls_by_trait: FxHashMap<TraitId, Vec<(CrateModuleId, ImplId)>>,
impls_by_trait: FxHashMap<Trait, Vec<(CrateModuleId, ImplId)>>,
}
impl CrateImplBlocks {
@ -56,8 +56,7 @@ impl CrateImplBlocks {
&'a self,
tr: &Trait,
) -> impl Iterator<Item = ImplBlock> + 'a {
let id = tr.id;
self.impls_by_trait.get(&id).into_iter().flat_map(|i| i.iter()).map(
self.impls_by_trait.get(&tr).into_iter().flat_map(|i| i.iter()).map(
move |(module_id, impl_id)| {
let module = Module { krate: self.krate, module_id: *module_id };
ImplBlock::from_id(module, *impl_id)
@ -73,18 +72,18 @@ impl CrateImplBlocks {
let target_ty = impl_block.target_ty(db);
if let Some(tr) = impl_block.target_trait(db) {
self.impls_by_trait
.entry(tr)
.or_insert_with(Vec::new)
.push((module.module_id, impl_id));
} else {
if let Some(target_ty_fp) = TyFingerprint::for_impl(&target_ty) {
self.impls
.entry(target_ty_fp)
.or_insert_with(Vec::new)
.push((module.module_id, impl_id));
}
if let Some(tr) = impl_block.target_trait(db) {
self.impls_by_trait
.entry(tr.id)
.or_insert_with(Vec::new)
.push((module.module_id, impl_id));
}
}
@ -109,6 +108,20 @@ impl CrateImplBlocks {
}
}
/// Rudimentary check whether an impl exists for a given type and trait; this
/// will actually be done by chalk.
pub(crate) fn implements(db: &impl HirDatabase, trait_ref: TraitRef) -> bool {
// FIXME use all trait impls in the whole crate graph
let krate = trait_ref.trait_.module(db).krate(db);
let krate = match krate {
Some(krate) => krate,
None => return false,
};
let crate_impl_blocks = db.impls_in_crate(krate);
let mut impl_blocks = crate_impl_blocks.lookup_impl_blocks_for_trait(&trait_ref.trait_);
impl_blocks.any(|impl_block| &impl_block.target_ty(db) == trait_ref.self_ty())
}
fn def_crate(db: &impl HirDatabase, ty: &Ty) -> Option<Crate> {
match ty {
Ty::Apply(a_ty) => match a_ty.ctor {
@ -120,20 +133,64 @@ fn def_crate(db: &impl HirDatabase, ty: &Ty) -> Option<Crate> {
}
impl Ty {
// FIXME: cache this as a query?
// - if so, what signature? (TyFingerprint, Name)?
// - or maybe cache all names and def_ids of methods per fingerprint?
/// Look up the method with the given name, returning the actual autoderefed
/// receiver type (but without autoref applied yet).
pub fn lookup_method(self, db: &impl HirDatabase, name: &Name) -> Option<(Ty, Function)> {
self.iterate_methods(db, |ty, f| {
pub fn lookup_method(
self,
db: &impl HirDatabase,
name: &Name,
resolver: &Resolver,
) -> Option<(Ty, Function)> {
// FIXME: trait methods should be used before autoderefs
let inherent_method = self.clone().iterate_methods(db, |ty, f| {
let sig = f.signature(db);
if sig.name() == name && sig.has_self_param() {
Some((ty.clone(), f))
} else {
None
}
})
});
inherent_method.or_else(|| self.lookup_trait_method(db, name, resolver))
}
fn lookup_trait_method(
self,
db: &impl HirDatabase,
name: &Name,
resolver: &Resolver,
) -> Option<(Ty, Function)> {
let mut candidates = Vec::new();
for t in resolver.traits_in_scope() {
let data = t.trait_data(db);
for item in data.items() {
match item {
&TraitItem::Function(m) => {
let sig = m.signature(db);
if sig.name() == name && sig.has_self_param() {
candidates.push((t, m));
}
}
_ => {}
}
}
}
// FIXME:
// - we might not actually be able to determine fully that the type
// implements the trait here; it's enough if we (well, Chalk) determine
// that it's possible.
// - when the trait method is picked, we need to register an
// 'obligation' somewhere so that we later check that it's really
// implemented
// - both points go for additional requirements from where clauses as
// well (in fact, the 'implements' condition could just be considered a
// 'where Self: Trait' clause)
candidates.retain(|(t, _m)| {
let trait_ref = TraitRef { trait_: *t, substs: Substs::single(self.clone()) };
db.implements(trait_ref)
});
// FIXME if there's multiple candidates here, that's an ambiguity error
let (_chosen_trait, chosen_method) = candidates.first()?;
Some((self.clone(), *chosen_method))
}
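The shape of `lookup_trait_method` above is: collect every in-scope trait method whose name and self parameter match, drop the candidates whose trait is not implemented for the receiver, and (for now) take the first survivor. A stripped-down sketch of that filtering step, with made-up id types in place of the crate's `Trait` and `Function`:

#[derive(Clone, Copy, Debug, PartialEq)]
struct TraitId(u32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct MethodId(u32);

fn pick_trait_method(
    candidates: &[(TraitId, MethodId)],
    implements: impl Fn(TraitId) -> bool,
) -> Option<MethodId> {
    // As the FIXME above notes, more than one surviving candidate should really
    // be reported as an ambiguity instead of silently picking the first.
    candidates.iter().copied().find(|&(t, _)| implements(t)).map(|(_, m)| m)
}

fn main() {
    let candidates = [(TraitId(1), MethodId(10)), (TraitId(2), MethodId(20))];
    // Pretend only TraitId(2) has an impl for the receiver type.
    let chosen = pick_trait_method(&candidates, |t| t == TraitId(2));
    assert_eq!(chosen, Some(MethodId(20)));
}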
// This would be nicer if it just returned an iterator, but that runs into

View file

@ -1272,8 +1272,8 @@ fn test() {
[241; 252) 'Struct::FOO': u32
[262; 263) 'y': u32
[266; 275) 'Enum::BAR': u32
[285; 286) 'z': u32
[289; 302) 'TraitTest::ID': u32"###
[285; 286) 'z': {unknown}
[289; 302) 'TraitTest::ID': {unknown}"###
);
}
@ -1918,9 +1918,9 @@ fn test() {
[110; 114) 'self': &{unknown}
[170; 228) '{ ...i128 }': ()
[176; 178) 'S1': S1
[176; 187) 'S1.method()': {unknown}
[176; 187) 'S1.method()': u32
[203; 205) 'S2': S2
[203; 214) 'S2.method()': {unknown}"###
[203; 214) 'S2.method()': i128"###
);
}
@ -1964,10 +1964,10 @@ mod bar_test {
[169; 173) 'self': &{unknown}
[300; 337) '{ ... }': ()
[310; 311) 'S': S
[310; 320) 'S.method()': {unknown}
[310; 320) 'S.method()': u32
[416; 454) '{ ... }': ()
[426; 427) 'S': S
[426; 436) 'S.method()': {unknown}"###
[426; 436) 'S.method()': i128"###
);
}
@ -2319,3 +2319,27 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
}
}
#[test]
fn no_such_field_diagnostics() {
let diagnostics = MockDatabase::with_files(
r"
//- /lib.rs
struct S { foo: i32, bar: () }
impl S {
fn new() -> S {
S {
foo: 92,
baz: 62,
}
}
}
",
)
.diagnostics();
assert_snapshot_matches!(diagnostics, @r###"
"baz: 62": no such field
"###
);
}

View file

@ -20,7 +20,6 @@ jemallocator = { version = "0.1.9", optional = true }
jemalloc-ctl = { version = "0.2.0", optional = true }
ra_syntax = { path = "../ra_syntax" }
ra_ide_api_light = { path = "../ra_ide_api_light" }
ra_text_edit = { path = "../ra_text_edit" }
ra_db = { path = "../ra_db" }
ra_fmt = { path = "../ra_fmt" }

View file

@ -220,8 +220,8 @@ impl RootDatabase {
self.query(ra_db::ParseQuery).sweep(sweep);
self.query(hir::db::HirParseQuery).sweep(sweep);
self.query(hir::db::FileItemsQuery).sweep(sweep);
self.query(hir::db::FileItemQuery).sweep(sweep);
self.query(hir::db::AstIdMapQuery).sweep(sweep);
self.query(hir::db::AstIdToNodeQuery).sweep(sweep);
self.query(hir::db::RawItemsWithSourceMapQuery).sweep(sweep);
self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);

View file

@ -1,10 +1,11 @@
use std::cell::RefCell;
use itertools::Itertools;
use hir::{Problem, source_binder};
use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}};
use ra_db::SourceDatabase;
use ra_syntax::{
Location, SourceFile, SyntaxKind, TextRange, SyntaxNode,
ast::{self, AstNode},
};
use ra_text_edit::{TextEdit, TextEditBuilder};
@ -26,11 +27,31 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
check_unnecessary_braces_in_use_statement(&mut res, file_id, node);
check_struct_shorthand_initialization(&mut res, file_id, node);
}
let res = RefCell::new(res);
let mut sink = DiagnosticSink::new(|d| {
res.borrow_mut().push(Diagnostic {
message: d.message(),
range: d.highlight_range(),
severity: Severity::Error,
fix: None,
})
})
.on::<hir::diagnostics::UnresolvedModule, _>(|d| {
let source_root = db.file_source_root(d.file().original_file(db));
let create_file = FileSystemEdit::CreateFile { source_root, path: d.candidate.clone() };
let fix = SourceChange::file_system_edit("create module", create_file);
res.borrow_mut().push(Diagnostic {
range: d.highlight_range(),
message: d.message(),
severity: Severity::Error,
fix: Some(fix),
})
});
if let Some(m) = source_binder::module_from_file_id(db, file_id) {
check_module(&mut res, db, file_id, m);
m.diagnostics(db, &mut sink);
};
res
drop(sink);
res.into_inner()
}
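The `DiagnosticSink` pattern used here is essentially a set of type-directed callbacks with a generic fallback: `on::<D, _>` registers a handler for one concrete diagnostic type, and anything nobody claims falls through to the default handler. A self-contained approximation of that dispatch (not the real `hir::diagnostics` API, which also threads source files and ranges through):

use std::any::Any;

struct Sink<'a> {
    typed: Vec<Box<dyn FnMut(&dyn Any) -> bool + 'a>>, // returns true if handled
    fallback: Box<dyn FnMut(&dyn Any) + 'a>,
}

impl<'a> Sink<'a> {
    fn new(fallback: impl FnMut(&dyn Any) + 'a) -> Sink<'a> {
        Sink { typed: Vec::new(), fallback: Box::new(fallback) }
    }
    // Register a handler for one concrete diagnostic type.
    fn on<D: Any, F: FnMut(&D) + 'a>(mut self, mut f: F) -> Sink<'a> {
        self.typed.push(Box::new(move |d: &dyn Any| match d.downcast_ref::<D>() {
            Some(d) => {
                f(d);
                true
            }
            None => false,
        }));
        self
    }
    // Offer the diagnostic to each typed handler; unhandled ones go to the fallback.
    fn push(&mut self, d: &dyn Any) {
        if !self.typed.iter_mut().any(|handler| handler(d)) {
            (self.fallback)(d);
        }
    }
}

struct UnresolvedModule {
    candidate: String,
}

fn main() {
    let mut sink = Sink::new(|_d| println!("some other diagnostic"))
        .on::<UnresolvedModule, _>(|d| println!("fix: create {}", d.candidate));
    sink.push(&UnresolvedModule { candidate: "foo.rs".into() });
    sink.push(&42u32); // nothing registered for this type, falls back
}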
fn syntax_errors(acc: &mut Vec<Diagnostic>, source_file: &SourceFile) {
@ -128,34 +149,12 @@ fn check_struct_shorthand_initialization(
Some(())
}
fn check_module(
acc: &mut Vec<Diagnostic>,
db: &RootDatabase,
file_id: FileId,
module: hir::Module,
) {
let source_root = db.file_source_root(file_id);
for (name_node, problem) in module.problems(db) {
let diag = match problem {
Problem::UnresolvedModule { candidate } => {
let create_file =
FileSystemEdit::CreateFile { source_root, path: candidate.clone() };
let fix = SourceChange::file_system_edit("create module", create_file);
Diagnostic {
range: name_node.range(),
message: "unresolved module".to_string(),
severity: Severity::Error,
fix: Some(fix),
}
}
};
acc.push(diag)
}
}
#[cfg(test)]
mod tests {
use test_utils::assert_eq_text;
use insta::assert_debug_snapshot_matches;
use crate::mock_analysis::single_file;
use super::*;
@ -184,6 +183,34 @@ mod tests {
assert_eq_text!(after, &actual);
}
#[test]
fn test_unresolved_module_diagnostic() {
let (analysis, file_id) = single_file("mod foo;");
let diagnostics = analysis.diagnostics(file_id).unwrap();
assert_debug_snapshot_matches!(diagnostics, @r####"[
Diagnostic {
message: "unresolved module",
range: [0; 8),
fix: Some(
SourceChange {
label: "create module",
source_file_edits: [],
file_system_edits: [
CreateFile {
source_root: SourceRootId(
0
),
path: "foo.rs"
}
],
cursor_position: None
}
),
severity: Error
}
]"####);
}
#[test]
fn test_check_unnecessary_braces_in_use_statement() {
check_not_applicable(

View file

@ -6,9 +6,6 @@
//! database, and the `ra_hir` crate, where the majority of the analysis happens.
//! However, IDE-specific bits of the analysis (most notably completion) happen
//! in this crate.
//!
//! The sibling `ra_ide_api_light` handles those bits of IDE functionality
//! which are restricted to a single file and need only syntax.
// For proving that RootDatabase is RefUnwindSafe.
#![recursion_limit = "128"]
@ -33,10 +30,11 @@ mod impls;
mod assists;
mod diagnostics;
mod syntax_tree;
mod line_index;
mod folding_ranges;
mod line_index;
mod line_index_utils;
mod join_lines;
mod structure;
mod typing;
mod matching_brace;
@ -72,9 +70,10 @@ pub use crate::{
line_index_utils::translate_offset_with_edit,
folding_ranges::{Fold, FoldKind},
syntax_highlighting::HighlightedRange,
structure::{StructureNode, file_structure},
diagnostics::Severity,
};
pub use ra_ide_api_light::StructureNode;
pub use ra_db::{
Canceled, CrateGraph, CrateId, FileId, FilePosition, FileRange, SourceRootId,
Edition
@ -388,7 +387,7 @@ impl Analysis {
/// file outline.
pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
let file = self.db.parse(file_id);
ra_ide_api_light::file_structure(&file)
structure::file_structure(&file)
}
/// Returns the set of folding ranges.

View file

@ -28,7 +28,11 @@ pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
#[cfg(test)]
mod tests {
use crate::mock_analysis::analysis_and_position;
use crate::{
AnalysisChange, CrateGraph,
mock_analysis::{analysis_and_position, MockAnalysis},
Edition::Edition2018,
};
#[test]
fn test_resolve_parent_module() {
@ -59,4 +63,28 @@ mod tests {
let nav = analysis.parent_module(pos).unwrap().pop().unwrap();
nav.assert_match("baz MODULE FileId(1) [32; 44)");
}
#[test]
fn test_resolve_crate_root() {
let mock = MockAnalysis::with_files(
"
//- /bar.rs
mod foo;
//- /foo.rs
// empty <|>
",
);
let root_file = mock.id_of("/bar.rs");
let mod_file = mock.id_of("/foo.rs");
let mut host = mock.analysis_host();
assert!(host.analysis().crate_for(mod_file).unwrap().is_empty());
let mut crate_graph = CrateGraph::default();
let crate_id = crate_graph.add_crate_root(root_file, Edition2018);
let mut change = AnalysisChange::new();
change.set_crate_graph(crate_graph);
host.apply_change(change);
assert_eq!(host.analysis().crate_for(mod_file).unwrap(), vec![crate_id]);
}
}

View file

@ -216,9 +216,55 @@ mod tests {
use crate::{
mock_analysis::single_file_with_position,
mock_analysis::analysis_and_position,
FileId
FileId, ReferenceSearchResult
};
#[test]
fn test_find_all_refs_for_local() {
let code = r#"
fn main() {
let mut i = 1;
let j = 1;
i = i<|> + j;
{
i = 0;
}
i = 5;
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 5);
}
#[test]
fn test_find_all_refs_for_param_inside() {
let code = r#"
fn foo(i : u32) -> u32 {
i<|>
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 2);
}
#[test]
fn test_find_all_refs_for_fn_param() {
let code = r#"
fn foo(i<|> : u32) -> u32 {
i
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 2);
}
fn get_all_refs(text: &str) -> ReferenceSearchResult {
let (analysis, position) = single_file_with_position(text);
analysis.find_all_refs(position).unwrap().unwrap()
}
#[test]
fn test_rename_for_local() {
test_rename(

View file

@@ -1,7 +1,7 @@
---
created: "2019-02-05T22:03:50.763530100Z"
creator: insta@0.6.1
source: crates/ra_ide_api_light/src/structure.rs
source: crates/ra_ide_api/src/structure.rs
expression: structure
---
[

View file

@@ -270,3 +270,61 @@ fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option<FileSymbol> {
container_name: None,
})
}
#[cfg(test)]
mod tests {
use ra_syntax::SmolStr;
use crate::{
navigation_target::NavigationTarget,
mock_analysis::single_file,
Query,
};
#[test]
fn test_world_symbols_with_no_container() {
let code = r#"
enum FooInner { }
"#;
let mut symbols = get_symbols_matching(code, "FooInner");
let s = symbols.pop().unwrap();
assert_eq!(s.name(), "FooInner");
assert!(s.container_name().is_none());
}
#[test]
fn test_world_symbols_include_container_name() {
let code = r#"
fn foo() {
enum FooInner { }
}
"#;
let mut symbols = get_symbols_matching(code, "FooInner");
let s = symbols.pop().unwrap();
assert_eq!(s.name(), "FooInner");
assert_eq!(s.container_name(), Some(&SmolStr::new("foo")));
let code = r#"
mod foo {
struct FooInner;
}
"#;
let mut symbols = get_symbols_matching(code, "FooInner");
let s = symbols.pop().unwrap();
assert_eq!(s.name(), "FooInner");
assert_eq!(s.container_name(), Some(&SmolStr::new("foo")));
}
fn get_symbols_matching(text: &str, query: &str) -> Vec<NavigationTarget> {
let (analysis, _) = single_file(text);
analysis.symbol_search(Query::new(query.into())).unwrap()
}
}

View file

@@ -85,3 +85,260 @@ fn syntax_tree_for_token<T: AstToken>(node: &T, text_range: TextRange) -> Option
None
}
#[cfg(test)]
mod tests {
use crate::mock_analysis::{single_file, single_file_with_range};
#[test]
fn test_syntax_tree_without_range() {
// Basic syntax
let (analysis, file_id) = single_file(r#"fn foo() {}"#);
let syn = analysis.syntax_tree(file_id, None);
assert_eq!(
syn.trim(),
r#"
SOURCE_FILE@[0; 11)
FN_DEF@[0; 11)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 8)
L_PAREN@[6; 7)
R_PAREN@[7; 8)
WHITESPACE@[8; 9)
BLOCK@[9; 11)
L_CURLY@[9; 10)
R_CURLY@[10; 11)
"#
.trim()
);
let (analysis, file_id) = single_file(
r#"
fn test() {
assert!("
fn foo() {
}
", "");
}"#
.trim(),
);
let syn = analysis.syntax_tree(file_id, None);
assert_eq!(
syn.trim(),
r#"
SOURCE_FILE@[0; 60)
FN_DEF@[0; 60)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 7)
IDENT@[3; 7) "test"
PARAM_LIST@[7; 9)
L_PAREN@[7; 8)
R_PAREN@[8; 9)
WHITESPACE@[9; 10)
BLOCK@[10; 60)
L_CURLY@[10; 11)
WHITESPACE@[11; 16)
EXPR_STMT@[16; 58)
MACRO_CALL@[16; 57)
PATH@[16; 22)
PATH_SEGMENT@[16; 22)
NAME_REF@[16; 22)
IDENT@[16; 22) "assert"
EXCL@[22; 23)
TOKEN_TREE@[23; 57)
L_PAREN@[23; 24)
STRING@[24; 52)
COMMA@[52; 53)
WHITESPACE@[53; 54)
STRING@[54; 56)
R_PAREN@[56; 57)
SEMI@[57; 58)
WHITESPACE@[58; 59)
R_CURLY@[59; 60)
"#
.trim()
);
}
#[test]
fn test_syntax_tree_with_range() {
let (analysis, range) = single_file_with_range(r#"<|>fn foo() {}<|>"#.trim());
let syn = analysis.syntax_tree(range.file_id, Some(range.range));
assert_eq!(
syn.trim(),
r#"
FN_DEF@[0; 11)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 8)
L_PAREN@[6; 7)
R_PAREN@[7; 8)
WHITESPACE@[8; 9)
BLOCK@[9; 11)
L_CURLY@[9; 10)
R_CURLY@[10; 11)
"#
.trim()
);
let (analysis, range) = single_file_with_range(
r#"fn test() {
<|>assert!("
fn foo() {
}
", "");<|>
}"#
.trim(),
);
let syn = analysis.syntax_tree(range.file_id, Some(range.range));
assert_eq!(
syn.trim(),
r#"
EXPR_STMT@[16; 58)
MACRO_CALL@[16; 57)
PATH@[16; 22)
PATH_SEGMENT@[16; 22)
NAME_REF@[16; 22)
IDENT@[16; 22) "assert"
EXCL@[22; 23)
TOKEN_TREE@[23; 57)
L_PAREN@[23; 24)
STRING@[24; 52)
COMMA@[52; 53)
WHITESPACE@[53; 54)
STRING@[54; 56)
R_PAREN@[56; 57)
SEMI@[57; 58)
"#
.trim()
);
}
#[test]
fn test_syntax_tree_inside_string() {
let (analysis, range) = single_file_with_range(
r#"fn test() {
assert!("
<|>fn foo() {
}<|>
fn bar() {
}
", "");
}"#
.trim(),
);
let syn = analysis.syntax_tree(range.file_id, Some(range.range));
assert_eq!(
syn.trim(),
r#"
SOURCE_FILE@[0; 12)
FN_DEF@[0; 12)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 8)
L_PAREN@[6; 7)
R_PAREN@[7; 8)
WHITESPACE@[8; 9)
BLOCK@[9; 12)
L_CURLY@[9; 10)
WHITESPACE@[10; 11)
R_CURLY@[11; 12)
"#
.trim()
);
// With a raw string
let (analysis, range) = single_file_with_range(
r###"fn test() {
assert!(r#"
<|>fn foo() {
}<|>
fn bar() {
}
"#, "");
}"###
.trim(),
);
let syn = analysis.syntax_tree(range.file_id, Some(range.range));
assert_eq!(
syn.trim(),
r#"
SOURCE_FILE@[0; 12)
FN_DEF@[0; 12)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 8)
L_PAREN@[6; 7)
R_PAREN@[7; 8)
WHITESPACE@[8; 9)
BLOCK@[9; 12)
L_CURLY@[9; 10)
WHITESPACE@[10; 11)
R_CURLY@[11; 12)
"#
.trim()
);
// With a raw string
let (analysis, range) = single_file_with_range(
r###"fn test() {
assert!(r<|>#"
fn foo() {
}
fn bar() {
}"<|>#, "");
}"###
.trim(),
);
let syn = analysis.syntax_tree(range.file_id, Some(range.range));
assert_eq!(
syn.trim(),
r#"
SOURCE_FILE@[0; 25)
FN_DEF@[0; 12)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 8)
L_PAREN@[6; 7)
R_PAREN@[7; 8)
WHITESPACE@[8; 9)
BLOCK@[9; 12)
L_CURLY@[9; 10)
WHITESPACE@[10; 11)
R_CURLY@[11; 12)
WHITESPACE@[12; 13)
FN_DEF@[13; 25)
FN_KW@[13; 15)
WHITESPACE@[15; 16)
NAME@[16; 19)
IDENT@[16; 19) "bar"
PARAM_LIST@[19; 21)
L_PAREN@[19; 20)
R_PAREN@[20; 21)
WHITESPACE@[21; 22)
BLOCK@[22; 25)
L_CURLY@[22; 23)
WHITESPACE@[23; 24)
R_CURLY@[24; 25)
"#
.trim()
);
}
}

View file

@@ -1,392 +0,0 @@
use insta::assert_debug_snapshot_matches;
use ra_ide_api::{
mock_analysis::{single_file, single_file_with_position, single_file_with_range, MockAnalysis},
AnalysisChange, CrateGraph, Edition::Edition2018, Query, NavigationTarget,
ReferenceSearchResult,
};
use ra_syntax::SmolStr;
#[test]
fn test_unresolved_module_diagnostic() {
let (analysis, file_id) = single_file("mod foo;");
let diagnostics = analysis.diagnostics(file_id).unwrap();
assert_debug_snapshot_matches!("unresolved_module_diagnostic", &diagnostics);
}
// FIXME: move this test to hir
#[test]
fn test_unresolved_module_diagnostic_no_diag_for_inline_mode() {
let (analysis, file_id) = single_file("mod foo {}");
let diagnostics = analysis.diagnostics(file_id).unwrap();
assert!(diagnostics.is_empty());
}
#[test]
fn test_resolve_crate_root() {
let mock = MockAnalysis::with_files(
"
//- /bar.rs
mod foo;
//- /foo.rs
// empty <|>
",
);
let root_file = mock.id_of("/bar.rs");
let mod_file = mock.id_of("/foo.rs");
let mut host = mock.analysis_host();
assert!(host.analysis().crate_for(mod_file).unwrap().is_empty());
let mut crate_graph = CrateGraph::default();
let crate_id = crate_graph.add_crate_root(root_file, Edition2018);
let mut change = AnalysisChange::new();
change.set_crate_graph(crate_graph);
host.apply_change(change);
assert_eq!(host.analysis().crate_for(mod_file).unwrap(), vec![crate_id]);
}
fn get_all_refs(text: &str) -> ReferenceSearchResult {
let (analysis, position) = single_file_with_position(text);
analysis.find_all_refs(position).unwrap().unwrap()
}
fn get_symbols_matching(text: &str, query: &str) -> Vec<NavigationTarget> {
let (analysis, _) = single_file(text);
analysis.symbol_search(Query::new(query.into())).unwrap()
}
#[test]
fn test_find_all_refs_for_local() {
let code = r#"
fn main() {
let mut i = 1;
let j = 1;
i = i<|> + j;
{
i = 0;
}
i = 5;
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 5);
}
#[test]
fn test_find_all_refs_for_param_inside() {
let code = r#"
fn foo(i : u32) -> u32 {
i<|>
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 2);
}
#[test]
fn test_find_all_refs_for_fn_param() {
let code = r#"
fn foo(i<|> : u32) -> u32 {
i
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 2);
}
#[test]
fn test_world_symbols_with_no_container() {
let code = r#"
enum FooInner { }
"#;
let mut symbols = get_symbols_matching(code, "FooInner");
let s = symbols.pop().unwrap();
assert_eq!(s.name(), "FooInner");
assert!(s.container_name().is_none());
}
#[test]
fn test_world_symbols_include_container_name() {
let code = r#"
fn foo() {
enum FooInner { }
}
"#;
let mut symbols = get_symbols_matching(code, "FooInner");
let s = symbols.pop().unwrap();
assert_eq!(s.name(), "FooInner");
assert_eq!(s.container_name(), Some(&SmolStr::new("foo")));
let code = r#"
mod foo {
struct FooInner;
}
"#;
let mut symbols = get_symbols_matching(code, "FooInner");
let s = symbols.pop().unwrap();
assert_eq!(s.name(), "FooInner");
assert_eq!(s.container_name(), Some(&SmolStr::new("foo")));
}
#[test]
fn test_syntax_tree_without_range() {
// Basic syntax
let (analysis, file_id) = single_file(r#"fn foo() {}"#);
let syn = analysis.syntax_tree(file_id, None);
assert_eq!(
syn.trim(),
r#"
SOURCE_FILE@[0; 11)
FN_DEF@[0; 11)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 8)
L_PAREN@[6; 7)
R_PAREN@[7; 8)
WHITESPACE@[8; 9)
BLOCK@[9; 11)
L_CURLY@[9; 10)
R_CURLY@[10; 11)
"#
.trim()
);
let (analysis, file_id) = single_file(
r#"
fn test() {
assert!("
fn foo() {
}
", "");
}"#
.trim(),
);
let syn = analysis.syntax_tree(file_id, None);
assert_eq!(
syn.trim(),
r#"
SOURCE_FILE@[0; 60)
FN_DEF@[0; 60)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 7)
IDENT@[3; 7) "test"
PARAM_LIST@[7; 9)
L_PAREN@[7; 8)
R_PAREN@[8; 9)
WHITESPACE@[9; 10)
BLOCK@[10; 60)
L_CURLY@[10; 11)
WHITESPACE@[11; 16)
EXPR_STMT@[16; 58)
MACRO_CALL@[16; 57)
PATH@[16; 22)
PATH_SEGMENT@[16; 22)
NAME_REF@[16; 22)
IDENT@[16; 22) "assert"
EXCL@[22; 23)
TOKEN_TREE@[23; 57)
L_PAREN@[23; 24)
STRING@[24; 52)
COMMA@[52; 53)
WHITESPACE@[53; 54)
STRING@[54; 56)
R_PAREN@[56; 57)
SEMI@[57; 58)
WHITESPACE@[58; 59)
R_CURLY@[59; 60)
"#
.trim()
);
}
#[test]
fn test_syntax_tree_with_range() {
let (analysis, range) = single_file_with_range(r#"<|>fn foo() {}<|>"#.trim());
let syn = analysis.syntax_tree(range.file_id, Some(range.range));
assert_eq!(
syn.trim(),
r#"
FN_DEF@[0; 11)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 8)
L_PAREN@[6; 7)
R_PAREN@[7; 8)
WHITESPACE@[8; 9)
BLOCK@[9; 11)
L_CURLY@[9; 10)
R_CURLY@[10; 11)
"#
.trim()
);
let (analysis, range) = single_file_with_range(
r#"fn test() {
<|>assert!("
fn foo() {
}
", "");<|>
}"#
.trim(),
);
let syn = analysis.syntax_tree(range.file_id, Some(range.range));
assert_eq!(
syn.trim(),
r#"
EXPR_STMT@[16; 58)
MACRO_CALL@[16; 57)
PATH@[16; 22)
PATH_SEGMENT@[16; 22)
NAME_REF@[16; 22)
IDENT@[16; 22) "assert"
EXCL@[22; 23)
TOKEN_TREE@[23; 57)
L_PAREN@[23; 24)
STRING@[24; 52)
COMMA@[52; 53)
WHITESPACE@[53; 54)
STRING@[54; 56)
R_PAREN@[56; 57)
SEMI@[57; 58)
"#
.trim()
);
}
#[test]
fn test_syntax_tree_inside_string() {
let (analysis, range) = single_file_with_range(
r#"fn test() {
assert!("
<|>fn foo() {
}<|>
fn bar() {
}
", "");
}"#
.trim(),
);
let syn = analysis.syntax_tree(range.file_id, Some(range.range));
assert_eq!(
syn.trim(),
r#"
SOURCE_FILE@[0; 12)
FN_DEF@[0; 12)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 8)
L_PAREN@[6; 7)
R_PAREN@[7; 8)
WHITESPACE@[8; 9)
BLOCK@[9; 12)
L_CURLY@[9; 10)
WHITESPACE@[10; 11)
R_CURLY@[11; 12)
"#
.trim()
);
// With a raw string
let (analysis, range) = single_file_with_range(
r###"fn test() {
assert!(r#"
<|>fn foo() {
}<|>
fn bar() {
}
"#, "");
}"###
.trim(),
);
let syn = analysis.syntax_tree(range.file_id, Some(range.range));
assert_eq!(
syn.trim(),
r#"
SOURCE_FILE@[0; 12)
FN_DEF@[0; 12)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 8)
L_PAREN@[6; 7)
R_PAREN@[7; 8)
WHITESPACE@[8; 9)
BLOCK@[9; 12)
L_CURLY@[9; 10)
WHITESPACE@[10; 11)
R_CURLY@[11; 12)
"#
.trim()
);
// With a raw string
let (analysis, range) = single_file_with_range(
r###"fn test() {
assert!(r<|>#"
fn foo() {
}
fn bar() {
}"<|>#, "");
}"###
.trim(),
);
let syn = analysis.syntax_tree(range.file_id, Some(range.range));
assert_eq!(
syn.trim(),
r#"
SOURCE_FILE@[0; 25)
FN_DEF@[0; 12)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 8)
L_PAREN@[6; 7)
R_PAREN@[7; 8)
WHITESPACE@[8; 9)
BLOCK@[9; 12)
L_CURLY@[9; 10)
WHITESPACE@[10; 11)
R_CURLY@[11; 12)
WHITESPACE@[12; 13)
FN_DEF@[13; 25)
FN_KW@[13; 15)
WHITESPACE@[15; 16)
NAME@[16; 19)
IDENT@[16; 19) "bar"
PARAM_LIST@[19; 21)
L_PAREN@[19; 20)
R_PAREN@[20; 21)
WHITESPACE@[21; 22)
BLOCK@[22; 25)
L_CURLY@[22; 23)
WHITESPACE@[23; 24)
R_CURLY@[24; 25)
"#
.trim()
);
}

View file

@@ -1,28 +0,0 @@
---
created: "2019-01-22T14:45:01.486985900+00:00"
creator: insta@0.4.0
expression: "&diagnostics"
source: "crates\\ra_ide_api\\tests\\test\\main.rs"
---
[
Diagnostic {
message: "unresolved module",
range: [0; 8),
fix: Some(
SourceChange {
label: "create module",
source_file_edits: [],
file_system_edits: [
CreateFile {
source_root: SourceRootId(
0
),
path: "foo.rs"
}
],
cursor_position: None
}
),
severity: Error
}
]

View file

@@ -1,26 +0,0 @@
[package]
edition = "2018"
name = "ra_ide_api_light"
version = "0.1.0"
authors = ["rust-analyzer developers"]
publish = false
[dependencies]
itertools = "0.8.0"
superslice = "1.0.0"
join_to_string = "0.1.1"
rustc-hash = "1.0"
ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" }
ra_fmt = { path = "../ra_fmt" }
[dev-dependencies]
test_utils = { path = "../test_utils" }
insta = "0.7.0"
[dev-dependencies.proptest]
version = "0.9.0"
# Disable `fork` feature to allow compiling on webassembly
default-features = false
features = ["std", "bit-set", "break-dead-code"]

View file

@@ -1,12 +0,0 @@
//! This crate provides those IDE features which use only a single file.
//!
//! This usually means functions which take syntax tree as an input and produce
//! an edit or some auxiliary info.
mod structure;
use ra_syntax::TextRange;
pub use crate::{
structure::{file_structure, StructureNode},
};

View file

@@ -184,6 +184,10 @@ fn name_ref(p: &mut Parser) {
let m = p.start();
p.bump();
m.complete(p, NAME_REF);
} else if p.at(SELF_KW) {
let m = p.start();
p.bump();
m.complete(p, SELF_KW);
} else {
p.err_and_bump("expected identifier");
}

View file

@@ -4061,7 +4061,11 @@ impl ast::NameOwner for TraitDef {}
impl ast::AttrsOwner for TraitDef {}
impl ast::DocCommentsOwner for TraitDef {}
impl ast::TypeParamsOwner for TraitDef {}
impl TraitDef {}
impl TraitDef {
pub fn item_list(&self) -> Option<&ItemList> {
super::child_opt(self)
}
}
// TrueKw
#[derive(Debug, PartialEq, Eq, Hash)]

View file

@@ -292,7 +292,10 @@ Grammar(
], options: [["variant_list", "EnumVariantList"]] ),
"EnumVariantList": ( collections: [["variants", "EnumVariant"]] ),
"EnumVariant": ( traits: ["NameOwner", "DocCommentsOwner", "AttrsOwner"], options: ["Expr"] ),
"TraitDef": ( traits: ["VisibilityOwner", "NameOwner", "AttrsOwner", "DocCommentsOwner", "TypeParamsOwner"] ),
"TraitDef": (
traits: ["VisibilityOwner", "NameOwner", "AttrsOwner", "DocCommentsOwner", "TypeParamsOwner"],
options: ["ItemList"]
),
"Module": (
traits: ["VisibilityOwner", "NameOwner", "AttrsOwner", "DocCommentsOwner" ],
options: [ "ItemList" ]

View file

@@ -64,6 +64,12 @@ impl<N: AstNode> AstPtr<N> {
}
}
impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
fn from(ptr: AstPtr<N>) -> SyntaxNodePtr {
ptr.raw
}
}
#[test]
fn test_local_syntax_ptr() {
use crate::{ast, AstNode};

View file

@@ -1,2 +1,3 @@
extern crate foo;
extern crate foo as bar;
extern crate self as baz;

View file

@@ -1,4 +1,4 @@
SOURCE_FILE@[0; 43)
SOURCE_FILE@[0; 69)
EXTERN_CRATE_ITEM@[0; 17)
EXTERN_KW@[0; 6)
WHITESPACE@[6; 7)
@@ -23,3 +23,18 @@ SOURCE_FILE@[0; 43)
IDENT@[38; 41) "bar"
SEMI@[41; 42)
WHITESPACE@[42; 43)
EXTERN_CRATE_ITEM@[43; 68)
EXTERN_KW@[43; 49)
WHITESPACE@[49; 50)
CRATE_KW@[50; 55)
WHITESPACE@[55; 56)
SELF_KW@[56; 60)
SELF_KW@[56; 60)
WHITESPACE@[60; 61)
ALIAS@[61; 67)
AS_KW@[61; 63)
WHITESPACE@[63; 64)
NAME@[64; 67)
IDENT@[64; 67) "baz"
SEMI@[67; 68)
WHITESPACE@[68; 69)

View file

@@ -115,7 +115,11 @@ pub fn install_rustfmt() -> Result<()> {
}
pub fn install_format_hook() -> Result<()> {
let result_path = Path::new("./.git/hooks/pre-commit");
let result_path = Path::new(if cfg!(windows) {
"./.git/hooks/pre-commit.exe"
} else {
"./.git/hooks/pre-commit"
});
if !result_path.exists() {
run("cargo build --package tools --bin pre-commit", ".")?;
if cfg!(windows) {

View file

@@ -130,19 +130,6 @@ APIs in this crate are IDE centric: they take text offsets as input and produce
offsets and strings as output. This works on top of the rich code model powered by
`hir`.
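
For a feel of that shape, here is a minimal, illustrative sketch. It leans on the
`mock_analysis::single_file` test helper and assumes that helper stays publicly
reachable from outside the crate, as the removed `tests/main.rs` relied on; treat
it as an illustration of the offsets-in, strings-out style rather than a stable API.

```rust
use ra_ide_api::mock_analysis::single_file;

fn main() {
    // Build an in-memory Analysis over a single source file.
    let (analysis, file_id) = single_file("mod foo;");

    // Diagnostics come back as plain text ranges plus message strings,
    // matching the fields shown in the snapshot tests.
    for diagnostic in analysis.diagnostics(file_id).unwrap() {
        println!("{:?}: {}", diagnostic.range, diagnostic.message);
    }

    // The debug syntax tree is likewise returned as an ordinary String.
    println!("{}", analysis.syntax_tree(file_id, None));
}
```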
### `crates/ra_ide_api_light`
All IDE features which can be implemented if you only have access to a single
file. `ra_ide_api_light` could be used to enhance editing of Rust code without
the need to fiddle with build-systems, file synchronization and such.
In a sense, `ra_ide_api_light` is just a bunch of pure functions which take a
syntax tree as input.
The tests for `ra_ide_api_light` are `#[cfg(test)] mod tests` unit-tests spread
throughout its modules.
### `crates/ra_lsp_server`
An LSP implementation which wraps `ra_ide_api` into a language server protocol.

View file

@@ -210,7 +210,7 @@ fn main() {
}
```
-- Fill struct fields
- Fill struct fields
```rust
// before:
@@ -270,7 +270,22 @@ fn foo() {
}
```
-- Remove `dbg!`
- Inline local variable:
```rust
// before:
fn foo() {
let a<|> = 1 + 1;
let b = a * 10;
}
// after:
fn foo() {
let b = (1 + 1) * 10;
}
```
- Remove `dbg!`
```rust
// before:

View file

@@ -159,7 +159,7 @@
(interactive (list (rust-analyzer--select-runnable)))
(-let (((&hash "env" "bin" "args" "label") runnable))
(compilation-start
(string-join (cons bin args) " ")
(string-join (append (list bin) args '()) " ")
;; cargo-process-mode is nice, but try to work without it...
(if (functionp 'cargo-process-mode) 'cargo-process-mode nil)
(lambda (_) (concat "*" label "*")))